feat: HTTP server for exporting Arborix bundles

Introduces a read-only HTTP server (WAI/Warp) backed by the content
store, exposing three bundle-export endpoints:

- GET /bundle/name/:name   — export by stored term name
- GET /bundle/hash/:hash   — export by full Merkle hash
- GET /terms               — plain-text listing (debug)

Also adds a `tricu server` (aka `--serve`) CLI mode, moves `resolveExportTarget` /
`namesForHash` / `looksLikeHash` out of `Main.hs` into `ContentStore.hs`,
and cleans up unused exports and imports across `FileEval.hs` and `Wire.hs`.
This commit is contained in:
2026-05-06 14:09:56 -05:00
parent 0cd849447f
commit a36ff638a9
9 changed files with 279 additions and 60 deletions

1
.gitignore vendored
View File

@@ -6,6 +6,7 @@
/Dockerfile /Dockerfile
/config.dhall /config.dhall
/result /result
/result*
.aider* .aider*
WD WD
bin/ bin/

View File

@@ -314,3 +314,21 @@ Without `TRICU_DB_PATH` set, `eval` uses only the terms defined in the input fil
- **Evaluate files:** `nix run .# -- eval -f demos/equality.tri` - **Evaluate files:** `nix run .# -- eval -f demos/equality.tri`
- **GHC options:** `-threaded -rtsopts -with-rtsopts=-N` for parallel runtime. Use `-N` RTS flag for multi-core. - **GHC options:** `-threaded -rtsopts -with-rtsopts=-N` for parallel runtime. Use `-N` RTS flag for multi-core.
- **Upx** is in the devShell for binary compression if needed. - **Upx** is in the devShell for binary compression if needed.
## 12. Viewing Haskell Dependency Docs from Nix
When you need Haddock documentation for a Haskell dependency available in Nixpkgs, build the package's `doc` output directly with `^doc`.
Example:
Replace `megaparsec` with the dependency name you need:
```sh
nix build "nixpkgs#haskellPackages.${pkg}^doc"
```
View the available documentation files:
```sh
find ./result-doc -type f \( -name '*.html' -o -name '*.haddock' \) | sort
```

View File

@@ -6,14 +6,12 @@ tricu (pronounced "tree-shoe") is a programming language experiment in Haskell.
tricu is the word for "tree" in Lojban: `(x1) is a tree of species/cultivar (x2)`. tricu is the word for "tree" in Lojban: `(x1) is a tree of species/cultivar (x2)`.
I have fully embraced the slopmachine (LLM-assisted development) for this project. Nothing is stable or sacred. We will discover sanity at the end of the journey but we won't strive for it until then.
## Acknowledgements ## Acknowledgements
Tree Calculus was discovered by [Barry Jay](https://github.com/barry-jay-personal/blog). The addition of Triage rules were suggested by [Johannes Bader](https://johannes-bader.com/). Johannes is also the creator of [treecalcul.us](https://treecalcul.us) which has a great intuitive code playground using his language LambAda. Tree Calculus was discovered by [Barry Jay](https://github.com/barry-jay-personal/blog). The addition of Triage rules were suggested by [Johannes Bader](https://johannes-bader.com/). Johannes is also the creator of [treecalcul.us](https://treecalcul.us) which has a great intuitive code playground using his language LambAda.
## Versioning
This really is a repo for experimentation so I'm not doing anything sane with the versioning for now. If I decide to stabilize the project we'll start anew at 2.0.
## REPL examples ## REPL examples
``` ```

View File

@@ -4,12 +4,14 @@ import Research
import Control.Monad (foldM, forM_, void) import Control.Monad (foldM, forM_, void)
import Data.ByteString (ByteString) import Data.ByteString (ByteString)
import Data.Char (isHexDigit)
import Data.List (nub, sort) import Data.List (nub, sort)
import Data.Maybe (catMaybes, fromMaybe) import Data.Maybe (catMaybes, fromMaybe)
import Data.Text (Text) import Data.Text (Text)
import Database.SQLite.Simple import Database.SQLite.Simple
import System.Directory (createDirectoryIfMissing, getXdgDirectory, XdgDirectory(..)) import System.Directory (createDirectoryIfMissing, getXdgDirectory, XdgDirectory(..))
import System.Environment (getEnv, lookupEnv) import System.Environment (lookupEnv)
import System.Exit (die)
import System.FilePath ((</>), takeDirectory) import System.FilePath ((</>), takeDirectory)
import qualified Data.Map as Map import qualified Data.Map as Map
@@ -272,3 +274,36 @@ queryMaybeOne conn qry params = do
case results of case results of
[row] -> return $ Just row [row] -> return $ Just row
_ -> return Nothing _ -> return Nothing
-- | Resolve a user-supplied identifier (full/prefix hash, term name) to
-- a single term hash and the list of names bound to it. Dies on
-- ambiguity or missing term (matching the CLI @export@ semantics).
resolveExportTarget :: Connection -> String -> IO (Text, [Text])
resolveExportTarget conn input = do
let raw = T.pack $ dropWhile (== '#') input
byName <- query conn
"SELECT hash FROM terms WHERE (names = ? OR names LIKE ? OR names LIKE ? OR names LIKE ?) ORDER BY created_at DESC"
(raw, raw <> T.pack ",%", T.pack "," <> raw <> T.pack ",%", T.pack "%," <> raw) :: IO [Only T.Text]
case byName of
[Only fullHash] -> namesForHash conn fullHash >>= \names -> return (fullHash, names)
(_:_) -> die $ "Ambiguous term name: " ++ input
[] -> do
byHash <- query conn "SELECT hash FROM terms WHERE hash LIKE ? ORDER BY created_at DESC"
(Only (raw <> T.pack "%")) :: IO [Only T.Text]
case byHash of
[Only fullHash] -> namesForHash conn fullHash >>= \names -> return (fullHash, names)
[] -> if looksLikeHash raw
then return (raw, [])
else die $ "No term found matching: " ++ input
_ -> die $ "Ambiguous hash prefix: " ++ input
namesForHash :: Connection -> Text -> IO [Text]
namesForHash conn h = do
stored <- hashToTerm conn h
return $ maybe [] (parseNameList . termNames) stored
-- | Return 'True' when @t@ looks like a full or partial SHA-256 hex hash.
looksLikeHash :: Text -> Bool
looksLikeHash t =
let len = T.length t
in len >= 16 && len <= 64 && T.all isHexDigit t

View File

@@ -3,7 +3,6 @@ module FileEval
, evaluateFile , evaluateFile
, evaluateFileWithContext , evaluateFileWithContext
, evaluateFileResult , evaluateFileResult
, evaluateFile
, compileFile , compileFile
) where ) where
@@ -19,9 +18,8 @@ import Data.List (partition)
import Data.Maybe (fromMaybe, mapMaybe) import Data.Maybe (fromMaybe, mapMaybe)
import System.Environment (setEnv) import System.Environment (setEnv)
import System.FilePath (takeDirectory, normalise, (</>)) import System.FilePath (takeDirectory, normalise, (</>))
import System.IO (hPutStrLn, stderr)
import System.Exit (die) import System.Exit (die)
import Database.SQLite.Simple (Connection, close) import Database.SQLite.Simple (close)
import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Lazy as BL
import qualified Data.Map as Map import qualified Data.Map as Map
@@ -110,9 +108,6 @@ makeRelativeTo f i =
let d = takeDirectory f let d = takeDirectory f
in normalise $ d </> i in normalise $ d </> i
nsDefinitions :: String -> [TricuAST] -> [TricuAST]
nsDefinitions moduleName = map (nsDefinition moduleName)
nsDefinition :: String -> TricuAST -> TricuAST nsDefinition :: String -> TricuAST -> TricuAST
nsDefinition "" def = def nsDefinition "" def = def
nsDefinition moduleName (SDef name args body) nsDefinition moduleName (SDef name args body)

View File

@@ -1,6 +1,7 @@
module Main where module Main where
import ContentStore (getContentStorePath, initContentStore, termNames, hashToTerm, loadEnvironment, parseNameList) import ContentStore (initContentStore, loadEnvironment, resolveExportTarget)
import Server (runServer)
import Eval (evalTricu, mainResult, result) import Eval (evalTricu, mainResult, result)
import FileEval import FileEval
import Parser (parseTricu) import Parser (parseTricu)
@@ -9,8 +10,6 @@ import Research
import Wire import Wire
import Control.Monad (foldM) import Control.Monad (foldM)
import Data.Char (isHexDigit)
import Control.Monad.IO.Class ()
import Data.Text (Text, unpack) import Data.Text (Text, unpack)
import qualified Data.Text as T import qualified Data.Text as T
import Data.Version (showVersion) import Data.Version (showVersion)
@@ -18,12 +17,10 @@ import Paths_tricu (version)
import System.Console.CmdArgs import System.Console.CmdArgs
import System.Environment (lookupEnv) import System.Environment (lookupEnv)
import System.IO (hPutStrLn, stderr) import System.IO (hPutStrLn, stderr)
import System.Exit (die)
import Text.Megaparsec () import Text.Megaparsec ()
import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Lazy as BL
import Database.SQLite.Simple (Connection, Only(..), close) import Database.SQLite.Simple (close)
import qualified Database.SQLite.Simple as DB (query)
import qualified Data.Map as Map import qualified Data.Map as Map
@@ -34,6 +31,7 @@ data TricuArgs
| Compile { inputFile :: FilePath, outFile :: FilePath, exportNameOpt :: String } | Compile { inputFile :: FilePath, outFile :: FilePath, exportNameOpt :: String }
| Export { hash :: String, exportNameOpt :: String, outFile :: FilePath } | Export { hash :: String, exportNameOpt :: String, outFile :: FilePath }
| Import { inFile :: FilePath } | Import { inFile :: FilePath }
| Serve { host :: String, port :: Int }
deriving (Show, Data, Typeable) deriving (Show, Data, Typeable)
replMode :: TricuArgs replMode :: TricuArgs
@@ -101,10 +99,19 @@ compileMode = Compile
&= explicit &= explicit
&= name "compile" &= name "compile"
serveMode :: TricuArgs
serveMode = Serve
{ host = "127.0.0.1" &= help "Host to bind the server to." &= name "h" &= typ "HOST"
, port = 8787 &= help "HTTP port to listen on." &= name "p" &= typ "PORT"
}
&= help "Start a read-only HTTP server for exporting Arborix bundles."
&= explicit
&= name "server"
main :: IO () main :: IO ()
main = do main = do
let versionStr = "tricu Evaluator and REPL " ++ showVersion version let versionStr = "tricu Evaluator and REPL " ++ showVersion version
cmdArgsParsed <- cmdArgs $ modes [replMode, evaluateMode, decodeMode, compileMode, exportMode, importMode] cmdArgsParsed <- cmdArgs $ modes [replMode, evaluateMode, decodeMode, compileMode, exportMode, importMode, serveMode]
&= help "tricu: Exploring Tree Calculus" &= help "tricu: Exploring Tree Calculus"
&= program "tricu" &= program "tricu"
&= summary versionStr &= summary versionStr
@@ -119,7 +126,7 @@ main = do
evalResult <- case filePaths of evalResult <- case filePaths of
[] -> do [] -> do
initialEnv <- case maybeDbPath of initialEnv <- case maybeDbPath of
Just dbPath -> do Just _ -> do
conn <- initContentStore conn <- initContentStore
env <- loadEnvironment conn env <- loadEnvironment conn
close conn close conn
@@ -127,7 +134,7 @@ main = do
Nothing -> return Map.empty Nothing -> return Map.empty
input <- getContents input <- getContents
pure $ runTricuTEnv initialEnv input pure $ runTricuTEnv initialEnv input
(filePath:restFilePaths) -> do (_:restFilePaths) -> do
initialEnv <- case maybeDbPath of initialEnv <- case maybeDbPath of
Just _ -> do Just _ -> do
conn <- initContentStore conn <- initContentStore
@@ -144,24 +151,30 @@ main = do
[] -> getContents [] -> getContents
(filePath:_) -> readFile filePath (filePath:_) -> readFile filePath
putStrLn $ decodeResult $ result $ evalTricu Map.empty $ parseTricu value putStrLn $ decodeResult $ result $ evalTricu Map.empty $ parseTricu value
Export { hash = hashStr, exportNameOpt = exportNameArg, outFile = outFile } -> do Export { hash = hashStr, exportNameOpt = exportNameArg, outFile = outFilePath } -> do
conn <- initContentStore conn <- initContentStore
(resolvedHash, storedNames) <- resolveExportTarget conn hashStr (resolvedHash, storedNames) <- resolveExportTarget conn hashStr
exportName <- chooseExportName exportNameArg hashStr storedNames expName <- chooseExportName exportNameArg hashStr storedNames
bundleData <- exportNamedBundle conn [(exportName, resolvedHash)] bundleData <- exportNamedBundle conn [(expName, resolvedHash)]
BL.writeFile outFile (BL.fromStrict bundleData) BL.writeFile outFilePath (BL.fromStrict bundleData)
putStrLn $ "Exported bundle export " ++ unpack exportName ++ " -> " ++ unpack resolvedHash ++ " to " ++ outFile putStrLn $ "Exported bundle export " ++ unpack expName ++ " -> " ++ unpack resolvedHash ++ " to " ++ outFilePath
close conn close conn
Import { inFile = inFile } -> do Import { inFile = importFile } -> do
conn <- initContentStore conn <- initContentStore
bundleData <- BL.readFile inFile bundleData <- BL.readFile importFile
roots <- importBundle conn (BL.toStrict bundleData) roots <- importBundle conn (BL.toStrict bundleData)
putStrLn $ "Imported " ++ show (length roots) ++ " root(s):" putStrLn $ "Imported " ++ show (length roots) ++ " root(s):"
mapM_ (\r -> putStrLn $ " " ++ unpack r) roots mapM_ (\r -> putStrLn $ " " ++ unpack r) roots
close conn close conn
Compile { inputFile = inputFile', outFile = outFile', exportNameOpt = exportNameArg } -> Compile { inputFile = compileInputFile, outFile = compileOutFile, exportNameOpt = exportNameArg } ->
let exportName = if null exportNameArg then Nothing else Just (T.pack exportNameArg) let bundleExportName = if null exportNameArg then Nothing else Just (T.pack exportNameArg)
in compileFile inputFile' outFile' exportName in compileFile compileInputFile compileOutFile bundleExportName
Serve { host = hostStr, port = portNum } -> do
putStrLn $ "Starting Arborix bundle server on " ++ hostStr ++ ":" ++ show portNum
putStrLn $ " GET /bundle/hash/:hash — primary endpoint"
putStrLn $ " GET /bundle/name/:name — convenience endpoint"
putStrLn $ " Content-Type: application/vnd.arborix.bundle"
runServer hostStr portNum
runTricu :: String -> String runTricu :: String -> String
runTricu = formatT TreeCalculus . runTricuT runTricu = formatT TreeCalculus . runTricuT
@@ -207,30 +220,6 @@ runTricuEnvWithEnv env input =
res = result finalEnv res = result finalEnv
in (finalEnv, formatT TreeCalculus res) in (finalEnv, formatT TreeCalculus res)
resolveExportTarget :: Connection -> String -> IO (Text, [Text])
resolveExportTarget conn input = do
let raw = T.pack $ dropWhile (== '#') input
byName <- DB.query conn
"SELECT hash FROM terms WHERE (names = ? OR names LIKE ? OR names LIKE ? OR names LIKE ?) ORDER BY created_at DESC"
(raw, raw <> T.pack ",%", T.pack "," <> raw <> T.pack ",%", T.pack "%," <> raw) :: IO [Only T.Text]
case byName of
[Only fullHash] -> namesForHash conn fullHash >>= \names -> return (fullHash, names)
(_:_) -> die $ "Ambiguous term name: " ++ input
[] -> do
byHash <- DB.query conn "SELECT hash FROM terms WHERE hash LIKE ? ORDER BY created_at DESC"
(Only (raw <> T.pack "%")) :: IO [Only T.Text]
case byHash of
[Only fullHash] -> namesForHash conn fullHash >>= \names -> return (fullHash, names)
[] -> if looksLikeHash raw
then return (raw, [])
else die $ "No term found matching: " ++ input
_ -> die $ "Ambiguous hash prefix: " ++ input
namesForHash :: Connection -> Text -> IO [Text]
namesForHash conn h = do
stored <- hashToTerm conn h
return $ maybe [] (parseNameList . termNames) stored
chooseExportName :: String -> String -> [Text] -> IO Text chooseExportName :: String -> String -> [Text] -> IO Text
chooseExportName explicitName input storedNames chooseExportName explicitName input storedNames
| not (null explicitName) = return $ T.pack explicitName | not (null explicitName) = return $ T.pack explicitName
@@ -248,8 +237,3 @@ firstNonEmpty = go
go (x:xs) go (x:xs)
| T.null x = go xs | T.null x = go xs
| otherwise = Just x | otherwise = Just x
looksLikeHash :: Text -> Bool
looksLikeHash t =
let len = T.length t
in len >= 16 && len <= 64 && T.all isHexDigit t

176
src/Server.hs Normal file
View File

@@ -0,0 +1,176 @@
module Server
( runServer
) where
import Control.Exception (bracket)
import Data.ByteString.Lazy (fromStrict)
import Data.Char (isHexDigit)
import Data.String (fromString)
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import Database.SQLite.Simple (close)
import Network.HTTP.Types (Header, Status, status200, status400, status404, status405, hContentType)
import Network.Wai
import Network.Wai.Handler.Warp (defaultSettings, runSettings, setHost, setPort)
import qualified Data.Text as T

import ContentStore (initContentStore, nameToTerm, hashToTerm, listStoredTerms,
                     parseNameList, StoredTerm(..), termHash)
import Wire (exportNamedBundle)
-- | Start an HTTP server that serves Arborix bundles from the
-- local content store.
--
-- This is a read-only export surface: clients fetch bundle bytes
-- and inspect / verify / run them on their own. The server itself
-- never executes a bundle.
--
-- The bind host defaults to @127.0.0.1@ at the CLI level.
--
-- Endpoints
-- ---------
-- GET /health             — 200 "ok"
-- GET /bundle/name/:name  — export by stored term name
-- GET /bundle/hash/:hash  — export by full Merkle hash
-- GET /terms              — plain-text listing (debug)
--
runServer :: String -> Int -> IO ()
runServer hostStr port = runSettings serverSettings app
  where
    -- Parse the host string via IsString (Warp's HostPreference).
    serverSettings = setHost (fromString hostStr) (setPort port defaultSettings)
-- | WAI application backed by the content store.
-- Uses the same database path as @eval@ mode (env var
-- @TRICU_DB_PATH@ or the default location).
app :: Application
app request respond
  -- Read-only surface: reject every non-GET method uniformly with 405.
  -- (Previously only POST/PUT/DELETE got 405; PATCH, HEAD, OPTIONS and
  -- other methods incorrectly fell through to 404.)
  | requestMethod request /= "GET" =
      respond $ responseLBS status405 [] "Method not allowed"
  | otherwise = case pathInfo request of
      ["health"] ->
        respond healthResponse
      ["bundle", "name", nameText] ->
        nameHandler nameText >>= respond
      ["bundle", "hash", hashText] ->
        hashHandler hashText >>= respond
      ["terms"] ->
        termsResponse >>= respond
      _ ->
        respond $ responseLBS status404 [] "not found"
-- | GET /health — liveness probe; always responds 200 with body "ok".
healthResponse :: Response
healthResponse = responseLBS status200 [] "ok"
-- | GET /bundle/name/:name
-- Resolve a stored term name, export it as an Arborix bundle,
-- and return the raw bundle bytes.
--
-- Sets @Content-Type@, @X-Arborix-Root-Hash@ and @Content-Disposition@
-- headers. Returns 404 when the name does not resolve to any stored term.
nameHandler :: Text -> IO Response
nameHandler nameText =
  -- bracket guarantees the store connection is closed even when the
  -- lookup or the bundle export throws.
  bracket initContentStore close $ \conn -> do
    stored <- nameToTerm conn nameText
    case stored of
      Nothing ->
        return $ textResponse status404 ("not found: " <> nameText)
      Just term' -> do
        let rootHash = termHash term'
        bundleData <- exportNamedBundle conn [(firstOrRoot (termNames term'), rootHash)]
        let disposition =
              T.pack $ "attachment; filename=" ++ safeFileName (T.unpack nameText) ++ ".bundle"
        return $ responseLBS status200 (bundleHeaders rootHash disposition) (fromStrict bundleData)
-- | GET /bundle/hash/:hash
-- Resolve a full Merkle hash and export the root as an Arborix
-- bundle.
--
-- - Malformed hash (non-hex or < 16 chars): 400
-- - Well-formed but absent: 404
-- - Present: 200 with bundle bytes
hashHandler :: Text -> IO Response
hashHandler hashText
  | not (T.all isHexDigit raw) || T.length raw < 16 =
      return $ responseLBS status400 [] "400 Bad Request: invalid hash"
  | otherwise =
      -- bracket guarantees the store connection is closed even when the
      -- lookup or the bundle export throws.
      bracket initContentStore close $ \conn -> do
        stored <- hashToTerm conn raw
        case stored of
          Nothing ->
            return $ textResponse status404 ("not found: " <> hashText)
          Just term' -> do
            let rootHash = termHash term'
            bundleData <- exportNamedBundle conn [(firstOrRoot (termNames term'), rootHash)]
            return $ responseLBS status200
              (bundleHeaders rootHash "attachment; filename=hash.bundle")
              (fromStrict bundleData)
  where
    -- Accept an optional leading '#' prefix; avoid the String round trip.
    raw = T.dropWhile (== '#') hashText
-- | GET /terms
-- Plain-text listing of all stored terms (debugging only).
-- One line per term: names, hash, creation timestamp.
termsResponse :: IO Response
termsResponse =
  -- bracket guarantees the store connection is closed even when the
  -- listing query throws.
  bracket initContentStore close $ \conn -> do
    terms <- listStoredTerms conn
    let renderTerm term =
          termNames term <> " " <> termHash term <> " " <> T.pack (show (termCreatedAt term))
    return $ responseLBS status200
      [ (hContentType, encodeUtf8 "text/plain; charset=utf-8") ]
      (fromStrict $ encodeUtf8 $ T.unlines (map renderTerm terms))
-- | Build a UTF-8 plain-text response with the given status and body.
textResponse :: Status -> Text -> Response
textResponse st msg =
  let contentType = (hContentType, encodeUtf8 "text/plain; charset=utf-8")
  in responseLBS st [contentType] (fromStrict (encodeUtf8 msg))
-- | Response headers for a bundle payload: the Arborix media type,
-- the root hash, and a download filename hint.
bundleHeaders :: Text -> Text -> [Header]
bundleHeaders rootHash disposition =
  (hContentType, encodeUtf8 "application/vnd.arborix.bundle")
    : ("X-Arborix-Root-Hash", encodeUtf8 rootHash)
    : [("Content-Disposition", encodeUtf8 disposition)]
-- | First stored name bound to a term, falling back to "root" when the
-- stored name list is empty.
firstOrRoot :: Text -> Text
firstOrRoot stored =
  case parseNameList stored of
    (primary:_) -> primary
    []          -> "root"
-- | Sanitise a string to a safe filename prefix: ASCII letters, digits,
-- '-' and '_' pass through unchanged; every other character becomes '_'.
safeFileName :: String -> String
safeFileName = map sanitise
  where
    sanitise c
      | allowed c = c
      | otherwise = '_'
    allowed c =
      ('a' <= c && c <= 'z')
        || ('A' <= c && c <= 'Z')
        || ('0' <= c && c <= '9')
        || c == '-'
        || c == '_'

View File

@@ -44,7 +44,7 @@ import Data.Foldable (traverse_)
import Data.Map (Map) import Data.Map (Map)
import Data.Text (Text, unpack) import Data.Text (Text, unpack)
import Data.Text.Encoding (decodeUtf8, encodeUtf8) import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Word (Word16, Word32, Word64, Word8) import Data.Word (Word16, Word32, Word64)
import Database.SQLite.Simple (Connection) import Database.SQLite.Simple (Connection)
import GHC.Generics (Generic) import GHC.Generics (Generic)

View File

@@ -51,15 +51,20 @@ executable tricu
, filepath , filepath
, fsnotify , fsnotify
, haskeline , haskeline
, http-types
, megaparsec , megaparsec
, memory , memory
, mtl , mtl
, servant
, sqlite-simple , sqlite-simple
, stm
, tasty , tasty
, tasty-hunit , tasty-hunit
, text , text
, time , time
, transformers , transformers
, wai
, warp
, zlib , zlib
other-modules: other-modules:
ContentStore ContentStore
@@ -70,6 +75,7 @@ executable tricu
Paths_tricu Paths_tricu
REPL REPL
Research Research
Server
Wire Wire
default-language: Haskell2010 default-language: Haskell2010
@@ -99,15 +105,20 @@ test-suite tricu-tests
, filepath , filepath
, fsnotify , fsnotify
, haskeline , haskeline
, http-types
, megaparsec , megaparsec
, memory , memory
, mtl , mtl
, servant
, sqlite-simple , sqlite-simple
, stm
, tasty , tasty
, tasty-hunit , tasty-hunit
, text , text
, time , time
, transformers , transformers
, warp
, wai
, zlib , zlib
default-language: Haskell2010 default-language: Haskell2010
other-modules: other-modules:
@@ -119,4 +130,5 @@ test-suite tricu-tests
Paths_tricu Paths_tricu
REPL REPL
Research Research
Server
Wire Wire