Compare commits


No commits in common. "trunk" and "20230410" have entirely different histories.

64 changed files with 318 additions and 3723 deletions

.envrc

@ -1,2 +1,2 @@
source_env ..
use nix
use flake syndicate#syndicate_utils

.gitignore

@ -1,2 +1,5 @@
/nim.cfg
*.check
/.direnv
http_translator
json_translator
msg
json_socket_translator

README.md

@ -1,501 +1,19 @@
# Syndicate utils
## http_translator
Dispatches HTTP requests to registered handlers.
See [http_translator.config-example.pr](./http_translator.config-example.pr) for an example configuration.
## json_translator
Wrapper that executes a command, parses its JSON output, and asserts a Preserves conversion in an `<output …>` record.
## json_socket_translator
Utility to communicate with sockets that send and receive lines of JSON. Compatible with [mpv](https://mpv.io/), see [mpv.config-example.pr](./mpv.config-example.pr).
## msg
A utility that sends a message to $SYNDICATE_SOCK in the form `<ARGV…>`.
## Syndesizer
A Syndicate multitool that includes a number of different actors that become active via configuration.
Whether you use a single instance for many protocols or many specialized instances is up to you.
### Cache
Observes patterns and reässerts the captured values for a given lifetime. Takes the argument `<cache { dataspace: #!any lifetime: float }>`. The lifetime of a cache entry counts down from the moment the value is asserted.
Example configuration:
```
? <nixspace ?nixspace> [
  ; Require the nix_actor during observations.
  $nixspace ? <Observe <rec eval _> _> [
    $config <require-service <daemon nix_actor>> ]
  $nixspace ? <Observe <rec realise _> _> [
    $config <require-service <daemon nix_actor>> ]
  ; Cache anything captured by observers in the $nixspace for an hour.
  ; The nix_actor is not required during caching.
  $config <require-service <daemon syndesizer>>
  $config ? <service-object <daemon syndesizer> ?cap> [
    $cap <cache { dataspace: $nixspace lifetime: 3600.0 }> ]
]
```
### File System Usage
Summarizes the size of a file-system directory. Equivalent to `du -s -b`.
Query the size of a directory in bytes by observing `<file-system-usage "/SOME/PATH" ?size>`.
```
# Configuration example
? <exposed-dataspace ?ds> [
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
$cap <file-system-usage { dataspace: $ds }>
]
]
```
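As an illustration (a sketch, not from the repository, reusing the `$ds` and `$log` bindings seen in the other examples here), an observer of this assertion might log directory sizes:
```
# Hypothetical observer: log the size of /var/log in bytes.
$ds ? <file-system-usage "/var/log" ?size> [
  $log ! <log "-" { path: "/var/log" size: $size }>
]
```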
### HTTP driver
Experimental HTTP server that services requests using [some version](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/commit/9864ce0ec86fb2f916c2aab318a1e6994ab8834c/schemas/http.prs) of the http Syndicate protocol schema.
```
# Configuration example
let ?not-found = dataspace
$not-found ? <request _ ?res> [
$res ! <status 503 "Service unavailable">
$res ! <done "No binding here.">
]
let ?greeting = dataspace
$greeting ? <request _ ?res> [
$res ! <status 200 "ok">
$res ! <chunk "Hello world">
$res ! <done "!">
]
let ?http = dataspace
$http [
<http-bind #f 80 get [ ] $not-found>
<http-bind #f 80 get [|...|] $not-found>
<http-bind #f 80 get ["hello"] $greeting>
]
? <service-object <daemon http-driver> ?cap> [
$cap <http-driver { dataspace: $http }>
]
<daemon http-driver {
argv: [ "/bin/syndesizer" ]
clearEnv: #t
protocol: application/syndicate
}>
<require-service <daemon http-driver>>
```
### JSON Socket Translator
Communicate with sockets that send and receive lines of JSON using `<send …>` and `<recv …>` messages.
Responds to the gatekeeper step `<json-socket-translator { socket: <unix "…"> / <tcp "…" …> }>`, resolved via `<resolve … $resolver>` as in the example below.
```
# MPV configuration example
<require-service <daemon mpv-server>>
<daemon mpv-server {
argv: [
"/run/current-system/sw/bin/mpv"
"--really-quiet"
"--idle=yes"
"--no-audio-display"
"--input-ipc-server=/run/user/1000/mpv.sock"
"--volume=75"
]
protocol: none
}>
let ?resolver = dataspace
$resolver ? <accepted ?mpvSpace> $mpvSpace [
# announce the dataspace when the translator is connected
$config <mpv $mpvSpace>
$config <bind <ref { oid: "mpv" key: #x"" }> $mpvSpace #f>
# translate <play-file …> to an MPV command
?? <play-file ?file> [
! <send { "command": ["loadfile" $file "append-play"] }>
]
# clear the playlist on idle so it doesn't grow indefinitely
?? <recv {"event": "idle"}> [
! <send { "command": ["playlist-clear"] }>
]
]
? <service-state <daemon mpv-server> ready> [
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
$cap <resolve <json-socket-translator {
socket: <unix "/run/user/1000/mpv.sock">
}> $resolver>
]
]
```
### JSON Stdio Translator
Executes a command, parses its JSON output, converts it to the record `<recv @jsonData any>`, and both publishes and messages it to a dataspace.
```
# Configuration example
<require-service <daemon syndesizer>>
let ?ds = dataspace
<bind <ref {oid: "syndicate" key: #x""}> $ds #f>
? <service-object <daemon syndesizer> ?cap> [
$cap <json-stdio-translator {
argv: [
"yt-dlp"
"--dump-json"
"https://youtu.be/RR9GkEXDvog"
]
dataspace: $ds
}>
]
```
### Pulse proxy
An actor that produces proxies that accept assertions but only forward them during a pulse window.
This can be used to implement polling behavior or periodic service scheduling.
```
#!/usr/bin/env -S syndicate-server --control --config
let ?destination = dataspace
$destination ? ?x [
$log ! <log "destination" { +: $x }>
?- $log ! <log "destination" { -: $x }>
]
? <pulsator ?pulsator> [
$log ! <log "pulsator" { line: $pulsator }>
$pulsator <greeting "hello world">
]
<require-service <daemon syndesizer>>
let ?resolver = <* $config [<rewrite <accepted ?cap> <pulsator $cap>>]>
? <service-object <daemon syndesizer> ?cap> [
$log ! <log "service-object" { line: $cap }>
$cap <resolve <pulse {
target: $destination
interval: 4.0 # Interval between pulses.
period: 1.0 # Duration of pulse window.
dither: 2.0 # Gaussian deviation applied to each interval.
}> $resolver>
]
<daemon syndesizer {
argv: [ "/bin/syndesizer" ]
clearEnv: #t
protocol: application/syndicate
}>
```
### SQLite
Readonly access to SQLite databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.
Can be disabled by passing `--define:withSqlite=no` to the Nim compiler.
```
# Configuration example
<require-service <daemon syndesizer>>
let ?sqlspace = dataspace
? <service-object <daemon syndesizer> ?cap> [
$cap <sqlite {
dataspace: $sqlspace
database: "/var/db/example.db"
}>
]
let ?tuplespace = dataspace
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
$tuplespace [
? [?id ?name] [
$log ! <log "-" { row: <example-row $id $name> }>
]
? <sqlite-error ?msg ?ctx> [
$log ! <log "-" { msg: $msg ctx: $ctx }>
]
]
```
### XML translator
Translates between Preserves and XML according to the [Conventions for Common Data Types](https://preserves.dev/conventions.html).
Examples:
- `<xml-translation "<foo a=\"1\"> <bar>hello world!</bar></foo>" <foo {"a": 1} <bar "hello world!">>>`
- `<xml-translation "" [#t #f]>`
- `<xml-translation "<<</>>" #f>`
```
# Configuration example
? <sharedspace ?ds> [
$ds ? <Observe <rec xml-translation _> _> $config [
$config <require-service <daemon syndesizer>>
$config ? <service-object <daemon syndesizer> ?cap> [
$cap <xml-translator { dataspace: $ds }>
]
]
]
```
---
## esc-printer-driver
A basic [ESC/P](https://en.wikipedia.org/wiki/ESC/P) printer driver.
Takes a path to a printer device file as a command line argument.
The driver speaks the gatekeeper protocol and responds to the `<print {}>` step with a capability that prints strings it receives as messages.
While `<bold>` or `<italic>` is asserted to this entity, the printer stays in the corresponding font mode (if the printer supports it).
Sample Syndicate server script:
```
<require-service <daemon printer>>
let ?printer-resolver = dataspace
$printer-resolver ? <accepted ?printer> [
$printer <italic>
$printer ! "printer resolved\r\n"
]
? <service-object <daemon printer> ?cap> [
$cap <resolve <printer {}> $printer-resolver>
$log ! <log "-" { line: "printer started"}>
]
<daemon printer {
argv: [ "/bin/esc-printer-driver" "/dev/usb/lp0"]
protocol: application/syndicate
clearEnv: #t
}>
```
## http-client
The inverse of `http-driver`.
### Caveats
- HTTPS is assumed unless the request is to port 80.
- If the request or response sets `Content-Type` to `application/json` or `…/preserves`
the body will be a parsed Preserves value.
- No caching or proxying.
- Internal errors propagate using a `400 Internal client error` response.
Sample Syndicate server script:
```
#!/usr/bin/env -S syndicate-server --control --config
# A dataspace for handling the HTTP response.
let ?response-handler = dataspace
$response-handler [
?? <done { "code": "EUR" "exchange_middle": ?middle } > [
$log ! <log "-" { line: <exchange EUR RSD $middle> }>
$control <exit 0>
]
]
# A dataspace for collecting a dataspace from the http-client.
let ?client-resolver = dataspace
$client-resolver ? <accepted ?client> $client [
<request
# Request Dinar to Euro exchange rate.
<http-request 0 "kurs.resenje.org" 443
get ["api" "v1" "currencies" "eur" "rates" "today"]
{content-type: "application/json"} {} #f
>
$response-handler
>
]
# Pass the resolver dataspace to the client.
? <service-object <daemon http-client> ?cap> [
$cap <resolve <http-client { response-content-type-override: "" }> $client-resolver>
]
<require-service <daemon http-client>>
<daemon http-client {
argv: [ "/bin/http-client" ]
clearEnv: #t
env: {
BUILD_SUM: $sum
}
protocol: application/syndicate
}>
```
## mintsturdyref
A utility for minting [Sturdyrefs](https://synit.org/book/operation/builtin/gatekeeper.html#sturdyrefs).
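Minimal usage, following the usage text embedded in the tool's source (shown later in this diff):
```sh
# Mint a sturdyref for the OID "syndicate" with a sixteen-byte secret
# key read from stdin; /dev/null yields no bytes, so a null key is used.
mintsturdyref '"syndicate"' < /dev/null
```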
## mount_actor
Actor for mounting filesystems on Linux.
Sample Syndicate server script:
```
# Assert a file-system we want to mount.
<mount "/dev/sda3" "/boot" "vfat">
# Transform mount assertions into mount status observations.
? <mount ?source ?target ?fs> [
? <mount $source $target $fs _> [ ]
]
# Assert that mounting succeeded.
? <mount _ ?target _ #t> [
<service-state <mountpoint $target> ready>
]
# Assert mount failed.
? <mount _ ?target _ <failure _>> [
<service-state <mountpoint $target> failed>
]
# Assert the details into the machine dataspace.
? <machine-dataspace ?machine> [
$config ? <mount ?source ?target ?fs ?status> [
$machine <mount $source $target $fs $status>
]
]
# Require the mount_actor daemon.
<require-service <daemon mount_actor>>
<daemon mount_actor {
argv: ["/home/emery/src/bin/mount_actor"]
protocol: application/syndicate
}>
# Pass the daemon the config dataspace.
? <service-object <daemon mount_actor> ?cap> [
$cap { dataspace: $config }
]
```
## msg
A utility that parses its command-line arguments as Preserves and sends them as messages to `$SYNDICATE_ROUTE`.
When called as `assert` (by a symlink or a rename) it will make assertions instead.
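A hypothetical invocation (the records here are illustrative; `msg` forwards whatever Preserves values it is given):
```sh
# Send the record <beep 440> as a message to the dataspace at $SYNDICATE_ROUTE.
msg '<beep 440>'
# Called as "assert" (via symlink), the same values are asserted instead.
assert '<example-assertion 1 2 3>'
```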
## PostgreSQL
Readonly access to PostgreSQL databases.
Asserts rows as records in response to SQL query assertions.
Dynamic updates are not implemented.
```
let ?postgreStep = <postgre {connection: [["host" "db.example.com"] ["dbname" "example"] ["user" "hackme"]]}>
let ?tuplespace = dataspace
$tuplespace ? ?row [
$log ! <log "-" { line: $row }>
]
let ?resolver = dataspace
$resolver ? <accepted ?sqlspace> [
$sqlspace ? <sql-error ?msg ?context> [
$log ! <log "-" { line: $msg context: $context }>
]
$sqlspace <query [SELECT firstname FROM users] $tuplespace>
]
<require-service <daemon postgre-actor>>
$config ? <service-object <daemon postgre-actor> ?cap> [
$cap <resolve $postgreStep $resolver>
]
<daemon postgre-actor {
argv: [ "/bin/postgre-actor" ]
clearEnv: #t
protocol: application/syndicate
}>
```
## preserve_process_environment
This utility serializes its process environment to Preserves and prints it to stdout.
It can be used to feed the environment variables of a nested child of the Syndicate server back to the server, for example to retrieve the environment variables that a desktop manager passed on to its children.
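For instance (a sketch; the exact output shape is not documented in this diff and a dictionary of variables is assumed):
```sh
# Print this process's environment to stdout as Preserves.
preserve_process_environment
# Assumed output shape, abbreviated: { "HOME": "/home/user" "TERM": "xterm" }
```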
## SQLite
Readonly access to SQLite databases.
Asserts rows as records in response to SQL query assertions.
Dynamic updates are not implemented.
```
# Configuration example
let ?sqliteStep = <sqlite { database: "/var/db/stuff.db" }>
let ?tuplespace = dataspace
$tuplespace ? ?row [
$log ! <log "-" { line: $row }>
]
let ?resolver = dataspace
$resolver [
? <rejected ?detail> [
$log ! <log "-" { line: $detail }>
]
? <accepted ?sqlspace> [
$log ! <log "-" { sqlspace: $sqlspace }>
$sqlspace ? <sql-error ?msg ?context> [
$log ! <log "-" { line: $msg context: $context }>
]
$sqlspace <query [ SELECT local_display_name FROM contacts ] $tuplespace>
]
]
<require-service <daemon sqlite-actor>>
$config ? <service-object <daemon sqlite-actor> ?cap> [
$cap <resolve $sqliteStep $resolver>
]
<daemon sqlite-actor {
argv: [ "/bin/sqlite-actor" ]
clearEnv: #t
protocol: application/syndicate
}>
```
## syndump
Utility for printing assertions and messages. Parses the command-line arguments as a pattern, connects to a dataspace via `$SYNDICATE_ROUTE`, and writes observations to standard output. Published assertions are prefixed by the `+` character, retractions by `-`, and messages by `!`.
Example:
```sh
# Print patterns in use, filter down with AWK to only the published patterns.
$ FS=':' syndump '<Observe ? _>' | awk -F : '/^\+/ { print $2 }'
```
## XSLT processor
Performs XML stylesheet transformations. For a given textual XSLT stylesheet and a textual XML document it generates an abstract XML document in Preserves form. Inputs may be XML text or paths to XML files.
```
# Configuration example
let ?ds = dataspace
$ds [
? <xslt-transform "/stylesheet.xsl" "/doc.xml" ?output> [
? <xml-translation ?text $output> [
$log ! <log "-" { xslt-output: $text }>
]
]
]
<require-service <daemon xslt_actor>>
? <service-object <daemon xslt_actor> ?cap> $cap [
<xml-translator { dataspace: $ds }>
<xslt { dataspace: $ds }>
]
```

Tupfile

@ -1,8 +0,0 @@
include_rules
: sbom.json |> !sbom-to-nix |> | ./<lock>
run ./Tuprules.jq sbom.json
: foreach {bin} |> !assert_built |>
: $(BIN_DIR)/msg |> !symlink |> $(BIN_DIR)/beep
: $(BIN_DIR)/msg |> !symlink |> $(BIN_DIR)/assert

Tuprules.jq

@ -1,12 +0,0 @@
#! /usr/bin/env -S jq --raw-output --from-file
.metadata.component.properties as $props |
$props |
( map( select(.name | .[0:10] == "nim:binDir") ) +
map( select(.name | .[0:10] == "nim:srcDir") ) |
map( .value )
) + ["."] | .[0] as $binDir |
$props |
map( select(.name | .[0:8] == "nim:bin:") ) |
map( ": \($binDir)/\(.value).nim |> !nim_bin |> $(BIN_DIR)/\(.name[8:]) {bin}" ) |
join("\n")

Tuprules.tup

@ -1,7 +1,2 @@
include ../syndicate-nim/depends.tup
PROJECT_DIR = $(TUP_CWD)
NIM = $(DIRENV) $(NIM)
NIM_GROUPS += $(SYNDICATE_PROTOCOL)
NIM_GROUPS += $(PROJECT_DIR)/<lock>
NIM_GROUPS += $(PROJECT_DIR)/<schema>
NIM_FLAGS += --path:$(TUP_CWD)/../syndicate-nim/src


@ -1,14 +0,0 @@
version 1.
FileSystemUsage = <file-system-usage @path string @size int>.
# This assertion publishes a dataspace that proxies assertions with
# an exception for <Observe …> which is pulsed every periodSec.
# The pulse resolution is no more than one millisecond.
Pulse = <pulse @periodSec float @proxy #:any>.
XmlTranslation = <xml-translation @xml string @pr any>.
XsltTransform = <xslt-transform @stylesheet string @input string @output any>.
XsltItems = [XsltItem ...].
XsltItem = string.


@ -1,4 +0,0 @@
version 1.
Base64Text = <base64 @txt string @bin bytes> .
Base64File = <base64-file @txt string @path string @size int> .

build-nim-sbom.nix

@ -1,192 +0,0 @@
{
lib,
stdenv,
fetchgit,
fetchzip,
runCommand,
xorg,
nim,
nimOverrides,
}:
let
fetchers = {
fetchzip =
{ url, sha256, ... }:
fetchzip {
name = "source";
inherit url sha256;
};
fetchgit =
{
fetchSubmodules ? false,
leaveDotGit ? false,
rev,
sha256,
url,
...
}:
fetchgit {
inherit
fetchSubmodules
leaveDotGit
rev
sha256
url
;
};
};
filterPropertiesToAttrs =
prefix: properties:
lib.pipe properties [
(builtins.filter ({ name, ... }: (lib.strings.hasPrefix prefix name)))
(map (
{ name, value }:
{
name = lib.strings.removePrefix prefix name;
inherit value;
}
))
builtins.listToAttrs
];
buildNimCfg =
{ backend, components, ... }:
let
componentSrcDirs = map (
{ properties, ... }:
let
fodProps = filterPropertiesToAttrs "nix:fod:" properties;
fod = fetchers.${fodProps.method} fodProps;
srcDir = fodProps.srcDir or "";
in
if srcDir == "" then fod else "${fod}/${srcDir}"
) components;
in
runCommand "nim.cfg"
{
outputs = [
"out"
"src"
];
nativeBuildInputs = [ xorg.lndir ];
}
''
pkgDir=$src/pkg
cat << EOF >> $out
backend:${backend}
path:"$src"
path:"$pkgDir"
EOF
mkdir -p "$pkgDir"
${lib.strings.concatMapStrings (d: ''
lndir "${d}" "$pkgDir"
'') componentSrcDirs}
'';
buildCommands = lib.attrsets.mapAttrsToList (
output: input: ''
nim compile $nimFlags --out:${output} ${input}
''
);
installCommands = lib.attrsets.mapAttrsToList (
output: input: ''
install -Dt $out/bin ${output}
''
);
applySbom =
sbom:
{
passthru ? { },
...
}@prevAttrs:
let
properties = # SBOM metadata.component.properties as an attrset.
lib.attrsets.recursiveUpdate (builtins.listToAttrs sbom.metadata.component.properties)
passthru.properties or { };
nimBin = # A mapping of Nim module file paths to names of programs.
lib.attrsets.recursiveUpdate (lib.pipe properties [
(lib.attrsets.filterAttrs (name: value: lib.strings.hasPrefix "nim:bin:" name))
(lib.attrsets.mapAttrs' (
name: value: {
name = lib.strings.removePrefix "nim:bin:" name;
value = "${properties."nim:binDir" or (properties."nim:srcDir" or ".")}/${value}";
}
))
]) passthru.nimBin or { };
in
{
strictDeps = true;
pname = prevAttrs.pname or sbom.metadata.component.name;
version = prevAttrs.version or sbom.metadata.component.version or null;
configurePhase =
prevAttrs.configurePhase or ''
runHook preConfigure
echo "nim.cfg << $nimCfg"
cat $nimCfg >> nim.cfg
cat << EOF >> nim.cfg
nimcache:"$NIX_BUILD_TOP/nimcache"
parallelBuild:$NIX_BUILD_CORES
EOF
runHook postConfigure
'';
buildPhase =
prevAttrs.buildPhase or ''
runHook preBuild
${lib.strings.concatLines (buildCommands nimBin)}
runHook postBuild
'';
installPhase =
prevAttrs.installPhase or ''
runHook preInstall
${lib.strings.concatLines (installCommands nimBin)}
runHook postInstall
'';
nativeBuildInputs = (prevAttrs.nativeBuildInputs or [ ]) ++ [ nim ];
nimCfg =
prevAttrs.nimCfg or (buildNimCfg {
backend = prevAttrs.nimBackend or properties."nim:backend" or "c";
inherit (sbom) components;
});
passthru = {
inherit sbom properties nimBin;
};
};
applyOverrides =
prevAttrs:
builtins.foldl' (
prevAttrs:
{ name, ... }@component:
if (builtins.hasAttr name nimOverrides) then
prevAttrs // (nimOverrides.${name} component prevAttrs)
else
prevAttrs
) prevAttrs prevAttrs.passthru.sbom.components;
compose =
callerArg: sbom: finalAttrs:
let
callerAttrs = if builtins.isAttrs callerArg then callerArg else callerArg finalAttrs;
sbomAttrs = callerAttrs // (applySbom sbom callerAttrs);
overrideAttrs = sbomAttrs // (applyOverrides sbomAttrs);
in
overrideAttrs;
in
callerArg: sbomArg:
let
sbom = if builtins.isAttrs sbomArg then sbomArg else builtins.fromJSON (builtins.readFile sbomArg);
overrideSbom = f: stdenv.mkDerivation (compose callerArg (sbom // (f sbom)));
in
(stdenv.mkDerivation (compose callerArg sbom)) // { inherit overrideSbom; }

View File

@ -1,89 +1,3 @@
version 1 .
embeddedType EntityRef.Cap .
Base64DecoderArguments = <base64-decoder {
dataspace: #:any
}>.
CacheArguments = <cache {
dataspace: #:any
lifetime: float
}>.
FileSystemStep = <file-system @detail FileSystemDetail> .
FileSystemDetail = {
# iounit: int
root: string
} .
FileSystemUsageArguments = <file-system-usage {
dataspace: #:any
}>.
JsonTranslatorArguments = <json-stdio-translator {
argv: [string ...]
dataspace: #:any
}>.
TcpAddress = <tcp @host string @port int>.
UnixAddress = <unix @path string>.
SocketAddress = TcpAddress / UnixAddress .
HttpClientStep = <http-client @detail HttpClientStepDetail>.
HttpClientStepDetail = {
# Body parsing happens according to a heuristic interpretation
# of Content-Type headers.
# Set this field as "application/octet-stream" to never parse
# response bodies or to "application/json" to parse all response
# bodies as JSON.
response-content-type-override: string
} .
HttpDriverStep = <http-driver { }> .
JsonSocketTranslatorStep = <json-socket-translator {
socket: SocketAddress
}>.
PostgreStep = <postgre {
connection: [PostgreConnectionParameter ...]
}>.
PostgreConnectionParameter = [@key string @val string].
PrinterStep = <printer {}> .
PulseStep = <pulse @detail PulseDetail> .
PulseDetail = {
# Destination for assertions.
target: #:any
# Interval in seconds at which assertions are forwarded.
interval: float
# Period in seconds of assertion.
period: float
# Dither the @interval with a Gaussian deviation of @dither.
dither: float
} .
PulseArguments = <pulse {
dataspace: #:any
}>.
SqliteStep = <sqlite {
database: string
}>.
XmlTranslatorArguments = <xml-translator {
dataspace: #:any
}>.
XsltArguments = <xslt {
dataspace: #:any
}>.
# Reused from syndicate-protocols/transportAddress
Tcp = <tcp @host string @port int>.
JsonSocket = <json-socket @label symbol @path string> .


@ -1,17 +0,0 @@
{
pkgs ? import <nixpkgs> { },
}:
with pkgs;
let
buildNimSbom = pkgs.callPackage ./build-nim-sbom.nix { };
in
buildNimSbom (finalAttrs: {
src = if lib.inNixShell then null else lib.cleanSource ./.;
buildInputs = [
postgresql.out
sqlite
libxml2
libxslt
openssl
];
}) ./sbom.json


@ -1,4 +0,0 @@
version 1 .
embeddedType EntityRef.Cap .
Read = <read @path string @offset int @count int @sink #:bytes> .

http_protocol.prs

@ -0,0 +1,22 @@
version 1 .
Method = =GET / =HEAD / =POST / =PUT / =DELETE / =CONNECT / =OPTIONS / =TRACE / =PATCH .
Methods = #{Method} .
; A URL path split into elements
Path = [string ...] .
Listener = <listen @port int> .
; Register an entity that will handle requests at path prefix.
; TODO: assert the public base URL of the handler to the entity.
Handler = <handler @methods Methods @path Path @entity #!any> .
Headers = {string: [string ...] ...:...} .
; A request awaiting a response at handle.
; TODO: query parameters
Request = <http @handle int @method Method @headers Headers @path Path @body string> .
; A response to handle.
Response = <http @handle int @code int @headers Headers @body string> .

http_translator.config-example.pr

@ -0,0 +1,20 @@
<require-service <daemon http_translator>>
<daemon http_translator {
argv: "/home/repo/syndicate/syndicate_utils/src/http_translator"
protocol: text/syndicate
}>
let ?other = dataspace
$other [
? <http ?handle GET ?headers ?path ?body> [
<http $handle 200 {} "get handler invoked">
]
]
? <service-object <daemon http_translator> ?cap> $cap [
<listen 8888>
; publish GET requests with prefix "/foo/bar" to other dataspace
<handler #{GET} ["foo" "bar"] $other>
]


@ -1,3 +0,0 @@
version 1 .
InotifyMessage = <inotify @path string @event symbol @cookie int @name string> .

lock.json

@ -1,137 +0,0 @@
{
"depends": [
{
"date": "2024-05-23T17:44:14+03:00",
"deepClone": false,
"fetchLFS": false,
"fetchSubmodules": true,
"hash": "sha256-qTRhHsOPNov1BQcm3P7NEkEPW6uh80XFfQRBdMp4o0Q=",
"leaveDotGit": false,
"method": "git",
"packages": [
"syndicate"
],
"path": "/nix/store/1lcxrap5n80hy1z4bcmsmdx83n4b9wjf-syndicate-nim",
"rev": "7ab4611824b676157523f2618e7893d5ac99e4f2",
"sha256": "0i53g3578h84gp2lbwx1mddhyh8jrpzdq9h70psqndlgqcg62d59",
"srcDir": "src",
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim.git"
},
{
"method": "fetchzip",
"packages": [
"bigints"
],
"path": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source",
"rev": "86ea14d31eea9275e1408ca34e6bfe9c99989a96",
"sha256": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4",
"srcDir": "src",
"url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"cps"
],
"path": "/nix/store/8gbhwni0akqskdb3qhn5nfgv6gkdz0vz-source",
"rev": "c90530ac57f98a842b7be969115c6ef08bdcc564",
"sha256": "0h8ghs2fqg68j3jdcg7grnxssmllmgg99kym2w0a3vlwca1zvr62",
"srcDir": "",
"url": "https://github.com/ehmry/cps/archive/c90530ac57f98a842b7be969115c6ef08bdcc564.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"getdns"
],
"path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
"rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
"sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"nimcrypto"
],
"path": "/nix/store/fkrcpp8lzj2yi21na79xm63xk0ggnqsp-source",
"rev": "485f7b3cfa83c1beecc0e31be0e964d697aa74d7",
"sha256": "1h3dzdbc9kacwpi10mj73yjglvn7kbizj1x8qc9099ax091cj5xn",
"srcDir": "",
"url": "https://github.com/cheatfate/nimcrypto/archive/485f7b3cfa83c1beecc0e31be0e964d697aa74d7.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"npeg"
],
"path": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source",
"rev": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d",
"sha256": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg",
"srcDir": "src",
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"preserves"
],
"path": "/nix/store/9zl4s2did00725n8ygbp37agvkskdhcx-source",
"rev": "1fee87590940761e288cf9ab3c7270832403b719",
"sha256": "1ny42rwr3yx52zwvkdg4lh54nxaxrmxdj9dlw3qarvvp2grfq4j2",
"srcDir": "src",
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/1fee87590940761e288cf9ab3c7270832403b719.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"stew"
],
"path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
"rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
"srcDir": "",
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"sys"
],
"path": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source",
"rev": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
"sha256": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q",
"srcDir": "src",
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"taps"
],
"path": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source",
"rev": "8c8572cd971d1283e6621006b310993c632da247",
"sha256": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
},
{
"date": "2024-05-22T06:09:38+02:00",
"deepClone": false,
"fetchLFS": false,
"fetchSubmodules": true,
"hash": "sha256-B3fMwgBpO2Ty8143k9V1cnHXa5K8i1+zN+eF/rBLMe0=",
"leaveDotGit": false,
"method": "git",
"packages": [
"solo5_dispatcher"
],
"path": "/nix/store/xqj48v4rqlffl1l94hi02szazj5gla8g-solo5_dispatcher",
"rev": "cc64ef99416b22b12e4a076d33de9e25a163e57d",
"sha256": "1v9i9fqgx1g76yrmz2xwj9mxfwbjfpar6dsyygr68fv9031cqxq7",
"srcDir": "pkg",
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher"
}
]
}


@ -1,5 +0,0 @@
version 1.
Mountpoint = <mount @source string @target string @type string @status Status> .
Status = Failure / @success #t .
Failure = <failure @msg string> .

mpv.config-example.pr

@ -1,56 +1,28 @@
let ?beepSpace = dataspace
<bind "syndicate" #x"" $beepSpace>
let ?socketPath = "/run/user/1000/mpv.sock"
let ?mpvSpace = dataspace
$mpvSpace [
; announce the dataspace when the translator is connected
? <connected $socketPath> [
$config <mpv $mpvSpace>
]
; translate <play-file > to an MPV command
?? <play-file ?file> [
$log ! <log "-" { line: <play-file $file> }>
! <send-json { "command": ["loadfile" $file "append-play"] }>
]
; log anything that comes back from MPV
; ?? <recv-json ?js> [ $log ! <log "-" { mpv: $js }> ]
; clear the playlist on idle so it doesn't grow indefinitely
?? <recv-json {"event": "idle"}> [
! <send-json { "command": ["playlist-clear"] }>
]
]
; need the translator and the translator needs the daemon
<require-service <daemon mpv-translator>>
<depends-on <daemon mpv-translator> <service-state <daemon mpv-server> ready>>
? <service-object <daemon mpv-translator> ?cap> [
$cap {
dataspace: $mpvSpace
socket: $socketPath
}
]
; assert and retract the daemon as the daemon is built (this is a testing artifact)
? <built json_socket_translator ?path ?sum> [
<daemon mpv-translator {
argv: [$path]
protocol: application/syndicate
env: {BUILD_SUM: $sum}
}>
]
; start mpv regardless
<daemon mpv-server {
argv: [
"/run/current-system/sw/bin/mpv"
"--really-quiet"
"--idle=yes"
"--no-audio-display"
<require-service <daemon mpv>>
<daemon mpv {
argv: ["/run/current-system/sw/bin/mpv"
"--input-ipc-server=/run/user/1000/mpv.sock"
"--idle=yes"
"--really-quiet"
]
protocol: none
}>
<require-service <daemon mpv-translator>>
<daemon mpv-translator {
argv: "/home/repo/syndicate/syndicate_utils/src/json_socket_translator"
protocol: text/syndicate
}>
? <service-object <daemon mpv-translator> ?mpvSpace> [
$mpvSpace <json-socket mpv $socketPath>
$beepSpace ?? <beep ?code> [
$mpvSpace ! <mpv 1 { "command": ["loadfile" "/tmp/beep.ogg"] }>
]
]


@ -1,2 +0,0 @@
version 1.
RoundTripTime = <rtt @address string @minimum float @average float @maximum float>.


@ -1,5 +0,0 @@
version 1.
Environment = { symbol: string ...:... } .
Select = <rofi-select @option string @environment Environment> .
Options = <rofi-options @options [string ...]> .

sbom.json

@ -1,653 +0,0 @@
{
"bomFormat": "CycloneDX",
"specVersion": "1.6",
"metadata": {
"component": {
"type": "application",
"bom-ref": "pkg:nim/syndicate_utils",
"name": "syndicate_utils",
"description": "Utilites for Syndicated Actors and Synit",
"version": "20240627",
"authors": [
{
"name": "Emery Hemingway"
}
],
"licenses": [
{
"license": {
"id": "Unlicense"
}
}
],
"properties": [
{
"name": "nim:skipExt",
"value": "nim"
},
{
"name": "nim:bin:postgre-actor",
"value": "postgre_actor"
},
{
"name": "nim:bin:xslt-actor",
"value": "xslt_actor"
},
{
"name": "nim:bin:preserve-process-environment",
"value": "preserve_process_environment"
},
{
"name": "nim:bin:mintsturdyref",
"value": "mintsturdyref"
},
{
"name": "nim:bin:esc-printer-driver",
"value": "esc_printer_driver"
},
{
"name": "nim:bin:msg",
"value": "msg"
},
{
"name": "nim:bin:rofi-script-actor",
"value": "rofi_script_actor"
},
{
"name": "nim:bin:syndesizer",
"value": "syndesizer"
},
{
"name": "nim:bin:http-client",
"value": "http_client"
},
{
"name": "nim:bin:mount-actor",
"value": "mount_actor"
},
{
"name": "nim:bin:syndump",
"value": "syndump"
},
{
"name": "nim:bin:sqlite-actor",
"value": "sqlite_actor"
},
{
"name": "nim:srcDir",
"value": "src"
},
{
"name": "nim:backend",
"value": "c"
}
]
}
},
"components": [
{
"type": "library",
"bom-ref": "pkg:nim/syndicate",
"name": "syndicate",
"version": "trunk",
"externalReferences": [
{
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/c21fdb5003417c99b8bb599df03fd7914cba7466.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/lw30rzfxk35nzkkp4d53s9nr6xalkg8s-source"
},
{
"name": "nix:fod:rev",
"value": "c21fdb5003417c99b8bb599df03fd7914cba7466"
},
{
"name": "nix:fod:sha256",
"value": "0f14w83hpjym23f12brrirqwlib9b7m52m0g63fzmrcl6ig9y915"
},
{
"name": "nix:fod:url",
"value": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/c21fdb5003417c99b8bb599df03fd7914cba7466.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "trunk"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/preserves",
"name": "preserves",
"version": "20240610",
"externalReferences": [
{
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/560a6417a30a2dff63f24b62498e9fcac2de8354.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/0sszsmz84ppwqsgda8cmli4lfh2mjmin-source"
},
{
"name": "nix:fod:rev",
"value": "560a6417a30a2dff63f24b62498e9fcac2de8354"
},
{
"name": "nix:fod:sha256",
"value": "19r983fy7m54mlaj0adxdp8pxi1x8dp6phkcnr8rz5y5cwndfjx2"
},
{
"name": "nix:fod:url",
"value": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/560a6417a30a2dff63f24b62498e9fcac2de8354.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20240610"
},
{
"name": "nix:fod:srcDir",
"value": "src"
},
{
"name": "nix:fod:date",
"value": "2024-05-23T15:58:40+03:00"
},
{
"name": "nix:fod:hash",
"value": "sha256-JvdvLdPajDgIPbLblO0LbOm0wEp530fs8LYmgH885sk="
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/sys",
"name": "sys",
"version": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
"externalReferences": [
{
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/ehmry/nim-sys.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source"
},
{
"name": "nix:fod:rev",
"value": "4ef3b624db86e331ba334e705c1aa235d55b05e1"
},
{
"name": "nix:fod:sha256",
"value": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q"
},
{
"name": "nix:fod:url",
"value": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/taps",
"name": "taps",
"version": "20240405",
"externalReferences": [
{
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.sr.ht/~ehmry/nim_taps",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source"
},
{
"name": "nix:fod:rev",
"value": "8c8572cd971d1283e6621006b310993c632da247"
},
{
"name": "nix:fod:sha256",
"value": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8"
},
{
"name": "nix:fod:url",
"value": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20240405"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/nimcrypto",
"name": "nimcrypto",
"version": "traditional-api",
"externalReferences": [
{
"url": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/cheatfate/nimcrypto",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/zyr8zwh7vaiycn1s4r8cxwc71f2k5l0h-source"
},
{
"name": "nix:fod:rev",
"value": "602c5d20c69c76137201b5d41f788f72afb95aa8"
},
{
"name": "nix:fod:sha256",
"value": "1dmdmgb6b9m5f8dyxk781nnd61dsk3hdxqks7idk9ncnpj9fng65"
},
{
"name": "nix:fod:url",
"value": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "traditional-api"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/npeg",
"name": "npeg",
"version": "1.2.2",
"externalReferences": [
{
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/zevv/npeg.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source"
},
{
"name": "nix:fod:rev",
"value": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d"
},
{
"name": "nix:fod:sha256",
"value": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg"
},
{
"name": "nix:fod:url",
"value": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "1.2.2"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/bigints",
"name": "bigints",
"version": "20231006",
"externalReferences": [
{
"url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/ehmry/nim-bigints.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source"
},
{
"name": "nix:fod:rev",
"value": "86ea14d31eea9275e1408ca34e6bfe9c99989a96"
},
{
"name": "nix:fod:sha256",
"value": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4"
},
{
"name": "nix:fod:url",
"value": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20231006"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/cps",
"name": "cps",
"version": "0.10.4",
"externalReferences": [
{
"url": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/nim-works/cps",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/m9vpcf3dq6z2h1xpi1vlw0ycxp91s5p7-source"
},
{
"name": "nix:fod:rev",
"value": "2a4d771a715ba45cfba3a82fa625ae7ad6591c8b"
},
{
"name": "nix:fod:sha256",
"value": "0c62k5wpq9z9mn8cd4rm8jjc4z0xmnak4piyj5dsfbyj6sbdw2bf"
},
{
"name": "nix:fod:url",
"value": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "0.10.4"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/stew",
"name": "stew",
"version": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"externalReferences": [
{
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/status-im/nim-stew",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source"
},
{
"name": "nix:fod:rev",
"value": "3c91b8694e15137a81ec7db37c6c58194ec94a6a"
},
{
"name": "nix:fod:sha256",
"value": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w"
},
{
"name": "nix:fod:url",
"value": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/getdns",
"name": "getdns",
"version": "trunk",
"externalReferences": [
{
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/e925d2f6d2bf31384969568e97917af8ef77b7a2.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.sr.ht/~ehmry/getdns-nim",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/hq145zgfjldsj7fh026ikfwkhs2cz9nv-source"
},
{
"name": "nix:fod:rev",
"value": "e925d2f6d2bf31384969568e97917af8ef77b7a2"
},
{
"name": "nix:fod:sha256",
"value": "0gflawpkwk8nghwvs69yb5mj3s6fzrmybys5466m2650xr26hs4p"
},
{
"name": "nix:fod:url",
"value": "https://git.sr.ht/~ehmry/getdns-nim/archive/e925d2f6d2bf31384969568e97917af8ef77b7a2.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "trunk"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/solo5_dispatcher",
"name": "solo5_dispatcher",
"version": "20240522",
"externalReferences": [
{
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/4jj467pg4hs6warhksb8nsxn9ykz8c7c-source"
},
{
"name": "nix:fod:rev",
"value": "cc64ef99416b22b12e4a076d33de9e25a163e57d"
},
{
"name": "nix:fod:sha256",
"value": "1v9i9fqgx1g76yrmz2xwj9mxfwbjfpar6dsyygr68fv9031cqxq7"
},
{
"name": "nix:fod:url",
"value": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20240522"
},
{
"name": "nix:fod:srcDir",
"value": "pkg"
}
]
}
],
"dependencies": [
{
"ref": "pkg:nim/syndicate_utils",
"dependsOn": [
"pkg:nim/syndicate"
]
},
{
"ref": "pkg:nim/syndicate",
"dependsOn": [
"pkg:nim/nimcrypto",
"pkg:nim/preserves",
"pkg:nim/sys",
"pkg:nim/taps"
]
},
{
"ref": "pkg:nim/preserves",
"dependsOn": [
"pkg:nim/npeg",
"pkg:nim/bigints"
]
},
{
"ref": "pkg:nim/sys",
"dependsOn": [
"pkg:nim/cps",
"pkg:nim/stew"
]
},
{
"ref": "pkg:nim/taps",
"dependsOn": [
"pkg:nim/getdns",
"pkg:nim/sys",
"pkg:nim/cps",
"pkg:nim/solo5_dispatcher"
]
},
{
"ref": "pkg:nim/nimcrypto",
"dependsOn": []
},
{
"ref": "pkg:nim/npeg",
"dependsOn": []
},
{
"ref": "pkg:nim/bigints",
"dependsOn": []
},
{
"ref": "pkg:nim/cps",
"dependsOn": []
},
{
"ref": "pkg:nim/stew",
"dependsOn": []
},
{
"ref": "pkg:nim/getdns",
"dependsOn": []
},
{
"ref": "pkg:nim/solo5_dispatcher",
"dependsOn": [
"pkg:nim/cps"
]
}
]
}


@ -1,8 +0,0 @@
version 1 .
# When asserted the actor responds to @target with rows as records
# of the given label and row columns as record fields.
Query = <query @statement [any ...] @target #:any> .
# When a query fails this is asserted instead.
SqlError = <sql-error @msg string @context string>.

src/Tupfile

@ -0,0 +1,2 @@
include_rules
: foreach *.nim | $(SYNDICATE_PROTOCOL) ./<schema> |> !nim_bin |>

src/esc_printer_driver.nim

@ -1,111 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## ESC/P printer control actor.
import
std/[cmdline, oserrors, posix, sequtils, sets],
pkg/sys/[files, ioqueue],
preserves, preserves/sugar,
syndicate, syndicate/relays,
syndicate/protocols/[gatekeeper, sturdy],
./private/esc_p
from pkg/sys/handles import FD
proc echo(args: varargs[string, `$`]) {.used.} =
stderr.writeLine(args)
type
HandleSet = HashSet[Handle]
Printer = ref object of Entity
device: AsyncFile
boldHandles, italicHandles, superscriptHandles, subscriptHandles: HandleSet
buffer: seq[byte]
isBusy: bool
proc flush(printer: Printer) {.asyncio.} =
printer.isBusy = true
while printer.buffer.len > 0:
let n = printer.device.write(printer.buffer)
if n > 0:
printer.buffer.delete(0..<n)
elif n < 0:
osLastError().osErrorMsg().quit()
printer.isBusy = false
proc write(printer: Printer; s: string) {.inline.} =
printer.buffer.add cast[seq[byte]](s)
if not printer.isBusy:
discard trampoline:
whelp printer.flush()
proc writeLine(printer: Printer; s: string) {.inline.} =
printer.write(s)
printer.write("\r\n")
method message(printer: Printer; t: Turn; a: AssertionRef) =
if a.value.isString:
printer.write(a.value.string)
# TODO: unicode?
# TODO: line breaks?
proc assert(printer: Printer; handles: var HandleSet; ctrl: string; h: Handle) =
if handles.len == 0: printer.write(ctrl)
handles.incl h
proc retract(printer: Printer; handles: var HandleSet; ctrl: string; h: Handle) =
handles.excl h
if handles.len == 0: printer.write(ctrl)
method publish(printer: Printer; t: Turn; a: AssertionRef; h: Handle) =
if a.value.isRecord("bold"):
printer.assert(printer.boldHandles, SelectBoldFont, h)
elif a.value.isRecord("italic"):
printer.assert(printer.italicHandles, SelectItalicFont, h)
elif a.value.isRecord("superscript"):
printer.assert(printer.superscriptHandles, SelectSuperScript, h)
elif a.value.isRecord("subscript"):
printer.assert(printer.subscriptHandles, SelectSubScript, h)
method retract(printer: Printer; t: Turn; h: Handle) =
if printer.boldHandles.contains h:
printer.retract(printer.boldHandles, CancelBoldFont, h)
elif printer.italicHandles.contains h:
printer.retract(printer.italicHandles, CanceItalicFont, h)
elif printer.superscriptHandles.contains h:
printer.retract(printer.superscriptHandles, CancelAltScript, h)
elif printer.subscriptHandles.contains h:
printer.retract(printer.subscriptHandles, CancelAltScript, h)
proc devicePath: string =
if paramCount() < 1:
quit "missing path to printer device file"
if paramCount() > 1:
quit "too many command line parameters"
paramStr(1)
proc openPrinter(turn: Turn): Printer =
new result
result.facet = turn.facet
let fd = posix.open(devicePath(), O_WRONLY or O_NONBLOCK, 0)
if fd < 0: osLastError().osErrorMsg().quit()
result.device = newAsyncFile(FD fd)
result.write(InitializePrinter)
runActor(devicePath()) do (turn: Turn):
let printer = openPrinter(turn)
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
let pat = Resolve?:{0: matchRecord("printer"), 1: grab()}
during(turn, relay, pat) do (cont: Cap):
# Publish for any <printer> step.
discard publish(turn, cont, ResolvedAccepted(
responderSession: turn.newCap(printer)))

src/http_client.nim

@ -1,119 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
# TODO: write a TAPS HTTP client. Figure out how to externalise TLS.
import
std/[httpclient, options, streams, strutils, tables, uri],
pkg/taps,
pkg/preserves,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, http],
./schema/config
proc url(req: HttpRequest): Uri =
result.scheme = if req.port == 80: "http" else: "https"
result.hostname = req.host.present
result.port = $req.port
for i, p in req.path:
if 0 < i: result.path.add '/'
result.path.add p.encodeUrl
for key, vals in req.query:
if result.query.len > 0:
result.query.add '&'
result.query.add key.string.encodeUrl
for i, val in vals:
if i == 0: result.query.add '='
elif i < vals.high: result.query.add ','
result.query.add val.string.encodeUrl
proc toContent(body: Value; contentType: var string): string =
case contentType
of "application/json", "text/javascript":
var stream = newStringStream()
writeText(stream, body, textJson)
return stream.data.move
of "application/preserves":
return cast[string](body.encode)
of "text/preserves":
return $body
else:
discard
case body.kind
of pkString:
result = body.string
if contentType == "":
contentType = "text/plain"
of pkByteString:
result = cast[string](body.bytes)
if contentType == "":
contentType = "application/octet-stream"
else:
raise newException(ValueError, "unknown content type")
proc spawnHttpClient*(turn: Turn; relay: Cap): Actor {.discardable.} =
let pat = Resolve?:{ 0: HttpClientStep.grabWithin, 1: grab() }
result = spawnActor(turn, "http-client") do (turn: Turn):
during(turn, relay, pat) do (detail: HttpClientStepDetail, observer: Cap):
linkActor(turn, "session") do (turn: Turn):
let ds = turn.newDataspace()
discard publish(turn, observer, ResolvedAccepted(responderSession: ds))
during(turn, ds, HttpContext.grabType) do (ctx: HttpContext):
let peer = ctx.res.unembed(Cap).get
var client = newHttpClient()
try:
var
headers = newHttpHeaders()
contentType: string
for key, val in ctx.req.headers:
if key == Symbol"content-type" or key == Symbol"Content-Type":
contentType = val
client.headers[key.string] = val
let stdRes = client.request(
ctx.req.url,
ctx.req.method.string.toUpper,
ctx.req.body.toContent(contentType), headers
)
var resp = HttpResponse(orKind: HttpResponseKind.status)
resp.status.code = stdRes.status[0 .. 2].parseInt
resp.status.message = stdRes.status[4 .. ^1]
message(turn, peer, resp)
resp = HttpResponse(orKind: HttpResponseKind.header)
for key, vals in stdRes.headers.table:
for val in vals.items:
resp.header.name = key.Symbol
resp.header.value = val
message(turn, peer, resp)
if detail.`response-content-type-override` != "":
contentType = detail.`response-content-type-override`
else:
for val in stdRes.headers.table.getOrDefault("content-type").items:
contentType = val
case contentType
of "application/json", "text/preserves", "text/javascript":
message(turn, peer,
initRecord("done", stdRes.bodyStream.readAll.parsePreserves))
of "application/preserves":
message(turn, peer,
initRecord("done", stdRes.bodyStream.decodePreserves))
else:
resp = HttpResponse(orKind: HttpResponseKind.done)
resp.done.chunk.string = stdRes.bodyStream.readAll()
message(turn, peer, resp)
except CatchableError as err:
var resp = HttpResponse(orKind: HttpResponseKind.status)
resp.status.code = 400
resp.status.message = "Internal client error"
message(turn, peer, resp)
resp = HttpResponse(orKind: HttpResponseKind.done)
resp.done.chunk.string = err.msg
message(turn, peer, resp)
client.close()
do:
client.close()
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnHttpClient(turn, relay)


@ -1 +0,0 @@
define:ssl

src/http_translator.nim

@ -0,0 +1,92 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[asyncdispatch, asynchttpserver, strutils, tables, uri]
import preserves
import syndicate, syndicate/actors
import ./schema/http_protocol
func toHttpCore(methods: Methods): set[HttpMethod] =
# Convert the schema type to the type in the httpcore module.
for m in methods:
result.incl(
case m
of Method.GET: HttpGET
of Method.HEAD: HttpHEAD
of Method.POST: HttpPOST
of Method.PUT: HttpPUT
of Method.DELETE: HttpDELETE
of Method.CONNECT: HttpCONNECT
of Method.OPTIONS: HttpOPTIONS
of Method.TRACE: HttpTRACE
of Method.PATCH: HttpPATCH)
proc splitPath(u: Uri): Path =
u.path.strip(chars = {'/'}).split("/")
proc hitch(a, b: Future[void]) =
a.addCallback do (f: Future[void]):
if f.failed: fail(b, f.error)
else: complete(b)
bootDataspace("main") do (ds: Ref; turn: var Turn):
connectStdio(ds, turn)
var
handlers: Table[seq[string], (Ref, set[HttpMethod])]
pathPrefixMaxLen = 0
requestIdSource = 0
during(turn, ds, ?Handler) do (methods: Methods; path: seq[string]; entity: Ref):
handlers[path] = (entity, methods.toHttpCore)
pathPrefixMaxLen = max(pathPrefixMaxLen, path.len)
do:
handlers.del(path)
pathPrefixMaxLen = 0
for path in handlers.keys:
pathPrefixMaxLen = max(pathPrefixMaxLen, path.len)
var parentFacet = turn.facet
proc handleRequest(req: asynchttpserver.Request): Future[void] =
# TODO: use pattern matching
var
entity: Ref
methods: set[HttpMethod]
path = req.url.splitPath()
block:
var prefix = path[0..min(pathPrefixMaxLen.succ, path.high)]
while entity.isNil:
(entity, methods) = handlers.getOrDefault(prefix)
if prefix.len == 0: break
else: discard prefix.pop()
if entity.isNil:
result = req.respond(Http503, "no handler registered for this path")
else:
var parentFut = newFuture[void]("handleRequest")
result = parentFut
if req.reqMethod notin methods:
result = req.respond(Http405, "method not valid for this handler")
else:
run(parentFacet) do (turn: var Turn):
inc requestIdSource
let
rId = requestIdSource
let rHandle = publish(turn, entity, http_protocol.Request(
handle: rId,
`method`: Method.GET,
headers: req.headers.table[],
path: path,
body: req.body))
onPublish(turn, entity, Response ? { 0: ?rId, 1: ?int, 3: ?string }) do (code: HttpCode, body: string):
req.respond(code, body).addCallback do (fut: Future[void]):
run(parentFacet) do (turn: var Turn): retract(turn, rHandle)
hitch(fut, parentFut)
during(turn, ds, ?Listener) do (port: Port):
var http = newAsyncHttpServer()
asyncCheck serve(http, port, handleRequest)
do:
close(http)
runForever()

src/json_socket_translator.nim

@ -0,0 +1,33 @@
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[asyncdispatch, asyncnet, json]
from std/nativesockets import AF_UNIX, SOCK_STREAM, Protocol
import preserves, preserves/jsonhooks, syndicate, syndicate/patterns
import ./schema/config
bootDataspace("main") do (ds: Ref; turn: var Turn):
connectStdio(ds, turn)
during(turn, ds, ?config.JsonSocket) do (label: Assertion, socketPath: string):
let socket = newAsyncSocket(
domain = AF_UNIX,
sockType = SOCK_STREAM,
protocol = cast[Protocol](0),
buffered = false)
waitFor connectUnix(socket, socketPath)
onMessage(turn, ds, recordPattern(label, ?toPreserve(1), grab())) do (data: Assertion):
var js: JsonNode
if fromPreserve(js, data):
asyncCheck socket.send($js & "\n")
let f = turn.facet
proc processIncoming(fut: Future[string]) {.gcsafe.} =
var data = fut.read.parseJson.toPreserve(Ref)
run(f) do (turn: var Turn):
message(turn, ds, initRecord(label, 0.toPreserve(Ref), data))
socket.recvLine.addCallback(processIncoming)
socket.recvLine.addCallback(processIncoming)
runForever()

src/json_translator.nim

@ -0,0 +1,40 @@
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[asyncdispatch, json, os, osproc, strutils]
import preserves # , preserves/jsonhooks,
import syndicate
from preserves/jsonhooks import toPreserveHook
proc runChild: string =
let params = commandLineParams()
if params.len < 1:
stderr.writeLine "not enough parameters"
quit 1
let
cmd = params[0]
args = params[1..params.high]
try: result = execProcess(command=cmd, args=args, options={poUsePath})
except:
stderr.writeLine "execProcess failed"
quit 1
if result == "":
stderr.writeLine "no ouput"
quit 1
proc translate(output: string): Assertion =
var js: JsonNode
try: js = parseJson output
except:
stderr.writeLine "parseJson failed"
quit 1
js.toPreserveHook(Ref)
bootDataspace("main") do (root: Ref; turn: var Turn):
connectStdio(root, turn)
discard publish(turn, root,
initRecord[Ref]("output", runChild().translate()))
runForever()

src/mintsturdyref.nim

@ -1,44 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
from os import commandLineParams
import preserves, syndicate/capabilities, syndicate/protocols/sturdy
const usage = """
mintsturdyref OID < SECRET_KEY
Mint Sturdyrefs using a sixteen-byte secret key read from stdin using OIDs
passed as command-line parameters.
Example:
mintsturdyref '"syndicate"' < /dev/null
See:
https://synit.org/book/operation/builtin/gatekeeper.html#sturdyrefs
https://synit.org/book/glossary.html?highlight=oid#oid
"""
proc main =
var oids: seq[Value]
for p in commandLineParams():
case p
of "-h", "--help", "?":
quit(usage)
else:
add(oids, parsePreserves p)
if oids.len == 0:
stderr.writeLine """using the "syndicate" OID"""
oids.add(toPreserves "syndicate")
var key: array[16, byte]
case readBytes(stdin, key, 0, 16)
of 16: discard
of 0: stderr.writeLine "using null key"
else: quit "expected sixteen bytes of key from stdin"
for oid in oids:
let sturdy = mint(key, oid)
doAssert validate(key, sturdy)
stdout.writeLine(sturdy)
main()

src/mount_actor.nim

@ -1,53 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## An actor for Linux file-system mounting.
when not defined(linux):
{.error: "this component only tested for Linux".}
import std/oserrors
import preserves, preserves/sugar
import syndicate
import ./schema/mountpoints
type BootArgs {.preservesDictionary.} = object
dataspace: Cap
proc mount(source, target, fsType: cstring; flags: culong; data: pointer): cint {.importc, header: "<sys/mount.h>".}
## `mount(2)`
proc umount(target: cstring): cint {.importc, header: "<sys/mount.h>".}
## `umount(2)`
proc spawnMountActor*(turn: Turn; ds: Cap): Actor {.discardable.} =
spawnActor(turn, "mount_actor") do (turn: Turn):
let
targetPat = observePattern(!Mountpoint, { @[%1]: grabLit() })
sourcePat = observePattern(!Mountpoint, {
@[%0]: grabLit(),
@[%2]: grabLit(),
})
during(turn, ds, ?:BootArgs) do (ds: Cap):
during(turn, ds, targetPat) do (target: string):
during(turn, ds, sourcePat) do (source: string, fsType: string):
var mountpoint = Mountpoint(
source: source,
target: target,
`type`: fsType,
)
var rc = mount(source, target, fsType, 0, nil)
if rc == 0:
mountpoint.status = Status(orKind: StatusKind.success)
else:
mountpoint.status = Status(orKind: StatusKind.Failure)
mountpoint.status.failure.msg = osErrorMsg(osLastError())
discard publish(turn, ds, mountpoint)
do:
discard umount(target)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
discard spawnMountActor(turn, ds)

src/msg.nim

@ -1,20 +1,19 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[sequtils, os, strutils]
import preserves, syndicate, syndicate/relays
import std/[asyncdispatch, os]
import preserves, syndicate, syndicate/capabilities
runActor("msg") do (turn: Turn):
let
data = map(commandLineParams(), parsePreserves)
cmd = paramStr(0).extractFilename.normalize
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
case cmd
of "assert":
for e in data:
publish(turn, ds, e)
else: # "msg"
for e in data:
message(turn, ds, e)
sync(turn, ds) do (turn: Turn):
stopActor(turn)
proc unixSocketPath: string =
result = getEnv("SYNDICATE_SOCK")
if result == "":
result = getEnv("XDG_RUNTIME_DIR", "/run/user/1000") / "dataspace"
bootDataspace("main") do (root: Ref; turn: var Turn):
let label = getAppFilename().extractFilename
connectUnix(turn, unixSocketPath(), capabilities.mint()) do (turn: var Turn; ds: Ref):
var a = initRecord[Ref](label, commandLineParams().toPreserve(Ref))
message(turn, ds, a)
for i in 0..7: poll(20)
# A hack to exit

View File

@ -1,163 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
pkg/preserves,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
./schema/[config, sql]
{.passL: "-lpq".}
{.pragma: libpq, header: "libpq-fe.h", importc.}
type
Oid = cuint
PGconn {.libpq.} = ptr object
PGresult {.libpq.} = ptr object
ConnStatusType {.libpq.} = enum
CONNECTION_OK, CONNECTION_BAD, ## Non-blocking mode only below here
##
## The existence of these should never be relied upon - they should only
## be used for user feedback or similar purposes.
##
CONNECTION_STARTED, ## Waiting for connection to be made.
CONNECTION_MADE, ## Connection OK; waiting to send.
CONNECTION_AWAITING_RESPONSE, ## Waiting for a response from the
## postmaster.
CONNECTION_AUTH_OK, ## Received authentication; waiting for
## backend startup.
CONNECTION_SETENV, ## This state is no longer used.
CONNECTION_SSL_STARTUP, ## Negotiating SSL.
CONNECTION_NEEDED, ## Internal state: connect() needed
CONNECTION_CHECK_WRITABLE, ## Checking if session is read-write.
CONNECTION_CONSUME, ## Consuming any extra messages.
CONNECTION_GSS_STARTUP, ## Negotiating GSSAPI.
CONNECTION_CHECK_TARGET, ## Checking target server properties.
CONNECTION_CHECK_STANDBY ## Checking if server is in standby mode.
ExecStatusType = enum
PGRES_EMPTY_QUERY = 0, ## empty query string was executed
PGRES_COMMAND_OK, ## a query command that doesn't return
## anything was executed properly by the
## backend
PGRES_TUPLES_OK, ## a query command that returns tuples was
## executed properly by the backend, PGresult
## contains the result tuples
PGRES_COPY_OUT, ## Copy Out data transfer in progress
PGRES_COPY_IN, ## Copy In data transfer in progress
PGRES_BAD_RESPONSE, ## an unexpected response was recv'd from the
## backend
PGRES_NONFATAL_ERROR, ## notice or warning message
PGRES_FATAL_ERROR, ## query failed
PGRES_COPY_BOTH, ## Copy In/Out data transfer in progress
PGRES_SINGLE_TUPLE, ## single tuple from larger resultset
PGRES_PIPELINE_SYNC, ## pipeline synchronization point
PGRES_PIPELINE_ABORTED ## Command didn't run because of an abort
## earlier in a pipeline
proc PQconnectdbParams(
keywords: cstringArray; values: cstringArray; expand_dbname: cint): PGconn {.libpq.}
proc PQerrorMessage(conn: PGconn): cstring {.libpq.}
proc PQfinish(conn: PGconn) {.libpq.}
proc PQstatus(conn: PGconn): ConnStatusType {.libpq.}
proc PQexec(conn: PGconn; query: cstring): PGresult {.libpq.}
proc PQresultStatus(res: PGresult): ExecStatusType {.libpq.}
proc PQresStatus(status: ExecStatusType): cstring {.libpq.}
proc PQresultErrorMessage(res: PGresult): cstring {.libpq.}
proc PQclear(res: PGresult) {.libpq.}
proc PQntuples(res: PGresult): cint {.libpq.}
proc PQnfields(res: PGresult): cint {.libpq.}
proc PQgetvalue(res: PGresult; tup_num: cint; field_num: cint): cstring {.libpq.}
proc PQftype(res: PGresult; field_num: cint): Oid {.libpq.}
proc PQfsize(res: PGresult; field_num: cint): cint {.libpq.}
# proc PQsocket(conn: PGconn): cint
# proc PQconnectStartParams(
# keywords: cstringArray; values: cstringArray; expand_dbname: cint): PGconn
# TODO: async
proc checkPointer(p: pointer) =
if p.isNil: raise newException(OutOfMemDefect, "Postgres returned nil")
type StringPairs = seq[tuple[key: string, val: string]]
proc splitParams(params: StringPairs): (cstringArray, cstringArray) =
var strings = newSeq[string](params.len)
for i, _ in params: strings[i] = params[i][0]
result[0] = allocCStringArray(strings)
for i, _ in params: strings[i] = params[i][1]
result[1] = allocCStringArray(strings)
proc renderSql(tokens: openarray[Value]): string =
for token in tokens:
if result.len > 0: result.add ' '
case token.kind
of pkSymbol:
result.add token.symbol.string
of pkString:
result.add '\''
result.add token.string
result.add '\''
of pkFloat, pkRegister, pkBigInt:
result.add $token
of pkBoolean:
if token.bool: result.add '1'
else: result.add '0'
else:
return ""
proc spawnPostgreActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
result = spawnActor(turn, "postgre") do (turn: Turn):
let pat = Resolve?:{ 0: PostgreStep.grabTypeFlat, 1: grab() }
during(turn, relay, pat) do (params: StringPairs, observer: Cap):
linkActor(turn, "postgre-conn") do (turn: Turn):
var
(keys, vals) = splitParams(params)
conn = PQconnectdbParams(keys, vals, 0)
checkPointer(conn)
let
status = PQstatus(conn)
msg = $PQerrorMessage(conn)
deallocCStringArray(keys)
deallocCStringArray(vals)
onStop(turn) do (turn: Turn):
PQfinish(conn)
if status == CONNECTION_OK:
let ds = turn.newDataspace()
discard publish(turn, ds, initRecord("status", toSymbol($status), msg.toPreserves))
during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
var text = renderSql statement
if text == "":
discard publish(turn, ds, SqlError(msg: "invalid statement", context: $statement))
else:
var
res = PQexec(conn, text)
st = PQresultStatus(res)
if st == PGRES_TUPLES_OK or st == PGRES_SINGLE_TUPLE:
let tuples = PQntuples(res)
let fields = PQnfields(res)
if tuples > 0 and fields > 0:
for r in 0..<tuples:
var tupl = initSequence(fields)
for f in 0..<fields:
tupl[f] = toPreserves($PQgetvalue(res, r, f))
discard publish(turn, target, tupl)
else:
discard publish(turn, ds, SqlError(
msg: $PQresStatus(st),
context: $PQresultErrorMessage(res),
))
PQclear(res)
discard publish(turn, observer,
ResolvedAccepted(responderSession: ds))
else:
discard publish(turn, observer,
Rejected(detail: msg.toPreserves))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnPostgreActor(turn, relay)
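Note that `renderSql` above space-joins the token sequence, single-quoting strings and rendering booleans as `1`/`0`; quotes inside strings are not escaped, so statements must not be built from untrusted input. A sketch of the rendering:

```
let stmt = parsePreserves "[select name from users]"
assert renderSql(stmt.sequence) == "select name from users"
```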

View File

@ -1,21 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[tables, os]
import preserves
type ProcessInfo {.preservesDictionary.} = object
program: string
argv: seq[string]
env: Table[string, string]
dir: string
proc main =
var info: ProcessInfo
info.program = getAppFilename()
info.argv = commandLineParams()
for key, val in envPairs(): info.env[key] = val
info.dir = getCurrentDir()
writeLine(stdout, info.toPreserves)
main()

View File

@ -1,11 +0,0 @@
const
ESC* = "\x1b"
InitializePrinter* = ESC & "@"
CancelLine* = ESC & "\x18"
SelectBoldFont* = ESC & "E"
CancelBoldFont* = ESC & "F"
SelectItalicFont* = ESC & "4"
CancelItalicFont* = ESC & "5"
SelectSuperScript* = ESC & "S0"
SelectSubScript* = ESC & "S1"
CancelAltScript* = ESC & "T"
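A minimal sketch of pairing these select/cancel codes, assuming stdout is wired to the printer:

```
stdout.write InitializePrinter
stdout.write SelectBoldFont & "bold" & CancelBoldFont
stdout.write SelectItalicFont & " italic" & CancelItalicFont
```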

View File

@ -1,31 +0,0 @@
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
# SPDX-License-Identifier: Unlicense
## See the rofi-script(5) manpage for documentation.
import std/[cmdline, envvars, strutils, tables]
import preserves, syndicate, syndicate/relays
import ./schema/rofi
if getEnv("ROFI_OUTSIDE") == "":
quit("run this program in rofi")
runActor("rofi_script_actor") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
case paramCount()
of 0:
let pat = ?:Options
onPublish(turn, ds, pat) do (options: seq[string]):
stdout.writeLine options.join("\n")
quit()
of 1:
var select = Select(option: commandLineParams()[0])
for (key, val) in envPairs():
if key.startsWith "ROFI_":
select.environment[Symbol key] = val
message(turn, ds, select)
sync(turn, ds, stopActor)
else:
quit("rofi passed an unexpected number of arguments")

View File

@ -1,2 +1,2 @@
include_rules
: foreach ../../*.prs |> !preserves-schema-nim |> %B.nim | $(PROJECT_DIR)/<schema>
: foreach ../../*.prs |> !preserves_schema_nim |> %B.nim | ../<schema>

View File

@ -1,31 +0,0 @@
import
preserves
type
XsltItems* = seq[XsltItem]
Pulse* {.preservesRecord: "pulse".} = object
`periodSec`*: float
`proxy`* {.preservesEmbedded.}: Value
XsltItem* = string
XmlTranslation* {.preservesRecord: "xml-translation".} = object
`xml`*: string
`pr`*: Value
FileSystemUsage* {.preservesRecord: "file-system-usage".} = object
`path`*: string
`size`*: BiggestInt
XsltTransform* {.preservesRecord: "xslt-transform".} = object
`stylesheet`*: string
`input`*: string
`output`*: Value
proc `$`*(x: XsltItems | Pulse | XsltItem | XmlTranslation | FileSystemUsage |
XsltTransform): string =
`$`(toPreserves(x))
proc encode*(x: XsltItems | Pulse | XsltItem | XmlTranslation | FileSystemUsage |
XsltTransform): seq[byte] =
encode(toPreserves(x))

View File

@ -1,19 +0,0 @@
import
preserves
type
Base64File* {.preservesRecord: "base64-file".} = object
`txt`*: string
`path`*: string
`size`*: BiggestInt
Base64Text* {.preservesRecord: "base64".} = object
`txt`*: string
`bin`*: seq[byte]
proc `$`*(x: Base64File | Base64Text): string =
`$`(toPreserves(x))
proc encode*(x: Base64File | Base64Text): seq[byte] =
encode(toPreserves(x))
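Constructing one of these by hand is a one-liner; a sketch using the same conversion the decoder actor performs, for "aGk=" ("hi" in base64):

```
import std/base64
import ./schema/base64 as schema   # import path is illustrative

let b = Base64Text(txt: "aGk=", bin: cast[seq[byte]](decode "aGk="))
echo b
```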

View File

@ -1,168 +1,14 @@
import
preserves
std/typetraits, preserves
type
PulseStep* {.preservesRecord: "pulse".} = object
`detail`*: PulseDetail
JsonTranslatorArgumentsField0* {.preservesDictionary.} = object
`argv`*: seq[string]
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
JsonTranslatorArguments* {.preservesRecord: "json-stdio-translator".} = object
`field0`*: JsonTranslatorArgumentsField0
SocketAddressKind* {.pure.} = enum
`TcpAddress`, `UnixAddress`
`SocketAddress`* {.preservesOr.} = object
case orKind*: SocketAddressKind
of SocketAddressKind.`TcpAddress`:
`tcpaddress`*: TcpAddress
of SocketAddressKind.`UnixAddress`:
`unixaddress`*: UnixAddress
PulseDetail* {.preservesDictionary.} = object
`dither`*: float
`interval`*: float
`period`*: float
`target`* {.preservesEmbedded.}: EmbeddedRef
Base64DecoderArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
Base64DecoderArguments* {.preservesRecord: "base64-decoder".} = object
`field0`*: Base64DecoderArgumentsField0
SqliteStepField0* {.preservesDictionary.} = object
`database`*: string
SqliteStep* {.preservesRecord: "sqlite".} = object
`field0`*: SqliteStepField0
XsltArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
XsltArguments* {.preservesRecord: "xslt".} = object
`field0`*: XsltArgumentsField0
HttpClientStepDetail* {.preservesDictionary.} = object
`response-content-type-override`*: string
FileSystemDetail* {.preservesDictionary.} = object
`root`*: string
JsonSocketTranslatorStepField0* {.preservesDictionary.} = object
`socket`*: SocketAddress
JsonSocketTranslatorStep* {.preservesRecord: "json-socket-translator".} = object
`field0`*: JsonSocketTranslatorStepField0
FileSystemUsageArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
FileSystemUsageArguments* {.preservesRecord: "file-system-usage".} = object
`field0`*: FileSystemUsageArgumentsField0
HttpClientStep* {.preservesRecord: "http-client".} = object
`detail`*: HttpClientStepDetail
HttpDriverStepField0* {.preservesDictionary.} = object
HttpDriverStep* {.preservesRecord: "http-driver".} = object
`field0`*: HttpDriverStepField0
PostgreStepField0* {.preservesDictionary.} = object
`connection`*: seq[PostgreConnectionParameter]
PostgreStep* {.preservesRecord: "postgre".} = object
`field0`*: PostgreStepField0
TcpAddress* {.preservesRecord: "tcp".} = object
`host`*: string
`port`*: BiggestInt
CacheArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
`lifetime`*: float
CacheArguments* {.preservesRecord: "cache".} = object
`field0`*: CacheArgumentsField0
XmlTranslatorArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
XmlTranslatorArguments* {.preservesRecord: "xml-translator".} = object
`field0`*: XmlTranslatorArgumentsField0
PostgreConnectionParameter* {.preservesTuple.} = object
`key`*: string
`val`*: string
PulseArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
PulseArguments* {.preservesRecord: "pulse".} = object
`field0`*: PulseArgumentsField0
Tcp* {.preservesRecord: "tcp".} = object
`host`*: string
`port`*: BiggestInt
FileSystemStep* {.preservesRecord: "file-system".} = object
`detail`*: FileSystemDetail
UnixAddress* {.preservesRecord: "unix".} = object
`path`*: string
JsonSocket* {.preservesRecord: "json-socket".} = object
`label`*: Symbol
`path`*: string
PrinterStepField0* {.preservesDictionary.} = object
PrinterStep* {.preservesRecord: "printer".} = object
`field0`*: PrinterStepField0
proc `$`*(x: JsonSocket): string =
`$`(toPreserve(x))
proc `$`*(x: PulseStep | JsonTranslatorArguments | SocketAddress | PulseDetail |
Base64DecoderArguments |
SqliteStep |
XsltArguments |
HttpClientStepDetail |
FileSystemDetail |
JsonSocketTranslatorStep |
FileSystemUsageArguments |
HttpClientStep |
HttpDriverStep |
PostgreStep |
TcpAddress |
CacheArguments |
XmlTranslatorArguments |
PostgreConnectionParameter |
PulseArguments |
Tcp |
FileSystemStep |
UnixAddress |
PrinterStep): string =
`$`(toPreserves(x))
proc encode*(x: PulseStep | JsonTranslatorArguments | SocketAddress |
PulseDetail |
Base64DecoderArguments |
SqliteStep |
XsltArguments |
HttpClientStepDetail |
FileSystemDetail |
JsonSocketTranslatorStep |
FileSystemUsageArguments |
HttpClientStep |
HttpDriverStep |
PostgreStep |
TcpAddress |
CacheArguments |
XmlTranslatorArguments |
PostgreConnectionParameter |
PulseArguments |
Tcp |
FileSystemStep |
UnixAddress |
PrinterStep): seq[byte] =
encode(toPreserves(x))
proc encode*(x: JsonSocket): seq[byte] =
encode(toPreserve(x))

View File

@ -1,23 +0,0 @@
import
preserves
type
Read* {.preservesRecord: "read".} = object
`path`*: string
`offset`*: BiggestInt
`count`*: BiggestInt
`sink`* {.preservesEmbedded.}: EmbeddedRef
Write* {.preservesRecord: "Write".} = object
`path`*: string
`offset`*: BiggestInt
`count`*: BiggestInt
`data`*: seq[byte]
`written`* {.preservesEmbedded.}: EmbeddedRef
proc `$`*(x: Read | Write): string =
`$`(toPreserves(x))
proc encode*(x: Read | Write): seq[byte] =
encode(toPreserves(x))

View File

@ -0,0 +1,38 @@
import
std/typetraits, preserves, std/tables
type
Path* = seq[string]
Headers* = Table[string, seq[string]]
Response* {.preservesRecord: "http".} = object
`handle`*: BiggestInt
`code`*: BiggestInt
`headers`*: Headers
`body`*: string
Listener* {.preservesRecord: "listen".} = object
`port`*: BiggestInt
Handler* {.preservesRecord: "handler".} = object
`methods`*: Methods
`path`*: Path
`entity`* {.preservesEmbedded.}: Preserve[void]
`Method`* {.preservesOr, pure.} = enum
`GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`,
`PATCH`
Request* {.preservesRecord: "http".} = object
`handle`*: BiggestInt
`method`*: Method
`headers`*: Headers
`path`*: Path
`body`*: string
Methods* = set[Method]
proc `$`*(x: Path | Headers | Response | Listener | Handler | Request | Methods): string =
`$`(toPreserve(x))
proc encode*(x: Path | Headers | Response | Listener | Handler | Request |
Methods): seq[byte] =
encode(toPreserve(x))

View File

@ -1,16 +0,0 @@
import
preserves
type
InotifyMessage* {.preservesRecord: "inotify".} = object
`path`*: string
`event`*: Symbol
`cookie`*: BiggestInt
`name`*: string
proc `$`*(x: InotifyMessage): string =
`$`(toPreserves(x))
proc encode*(x: InotifyMessage): seq[byte] =
encode(toPreserves(x))

View File

@ -1,14 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/json
import preserves, preserves/jsonhooks
export fromPreservesHook, toPreservesHook
# re-export the hooks so that conversion "just works"
type
SendJson* {.preservesRecord: "send".} = object
data*: JsonNode
RecvJson* {.preservesRecord: "recv".} = object
data*: JsonNode

View File

@ -1,30 +0,0 @@
import
preserves
type
Failure* {.preservesRecord: "failure".} = object
`msg`*: string
Mountpoint* {.preservesRecord: "mount".} = object
`source`*: string
`target`*: string
`type`*: string
`status`*: Status
StatusKind* {.pure.} = enum
`Failure`, `success`
`Status`* {.preservesOr.} = object
case orKind*: StatusKind
of StatusKind.`Failure`:
`failure`*: Failure
of StatusKind.`success`:
`success`* {.preservesLiteral: "#t".}: bool
proc `$`*(x: Failure | Mountpoint | Status): string =
`$`(toPreserves(x))
proc encode*(x: Failure | Mountpoint | Status): seq[byte] =
encode(toPreserves(x))

View File

@ -1,16 +0,0 @@
import
preserves
type
RoundTripTime* {.preservesRecord: "rtt".} = object
`address`*: string
`minimum`*: float
`average`*: float
`maximum`*: float
proc `$`*(x: RoundTripTime): string =
`$`(toPreserves(x))
proc encode*(x: RoundTripTime): seq[byte] =
encode(toPreserves(x))

View File

@ -1,18 +0,0 @@
import
preserves, std/tables
type
Environment* = Table[Symbol, string]
Select* {.preservesRecord: "rofi-select".} = object
`option`*: string
`environment`*: Environment
Options* {.preservesRecord: "rofi-options".} = object
`options`*: seq[string]
proc `$`*(x: Environment | Select | Options): string =
`$`(toPreserves(x))
proc encode*(x: Environment | Select | Options): seq[byte] =
encode(toPreserves(x))

View File

@ -1,18 +0,0 @@
import
preserves
type
Query* {.preservesRecord: "query".} = object
`statement`*: seq[Value]
`target`* {.preservesEmbedded.}: Value
SqlError* {.preservesRecord: "sql-error".} = object
`msg`*: string
`context`*: string
proc `$`*(x: Query | SqlError): string =
`$`(toPreserves(x))
proc encode*(x: Query | SqlError): seq[byte] =
encode(toPreserves(x))

View File

@ -1,156 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
pkg/preserves,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
./schema/[config, sql]
# Avoid Sqlite3 from the standard library because it is
# only held together by wishful thinking and dlload.
{.passC: staticExec"$PKG_CONFIG --cflags sqlite3".}
{.passL: staticExec"$PKG_CONFIG --libs sqlite3".}
{.pragma: sqlite3h, header: "sqlite3.h".}
var
SQLITE_VERSION_NUMBER {.importc, sqlite3h.}: cint
SQLITE_OK {.importc, sqlite3h.}: cint
SQLITE_ROW {.importc, sqlite3h.}: cint
SQLITE_DONE {.importc, sqlite3h.}: cint
SQLITE_OPEN_READONLY {.importc, sqlite3h.}: cint
const
SQLITE_INTEGER = 1
SQLITE_FLOAT = 2
SQLITE_TEXT = 3
SQLITE_BLOB = 4
# SQLITE_NULL = 5
type
Sqlite3 {.importc: "sqlite3", sqlite3h.} = distinct pointer
Stmt {.importc: "sqlite3_stmt", sqlite3h.} = distinct pointer
{.pragma: importSqlite3, importc: "sqlite3_$1", sqlite3h.}
proc libversion_number: cint {.importSqlite3.}
proc open_v2(filename: cstring; ppDb: ptr Sqlite3; flags: cint; zVfs: cstring): cint {.importSqlite3.}
proc close(ds: Sqlite3): int32 {.discardable, importSqlite3.}
proc errmsg(db: Sqlite3): cstring {.importSqlite3.}
proc prepare_v2(db: Sqlite3; zSql: cstring, nByte: cint; ppStmt: ptr Stmt; pzTail: ptr cstring): cint {.importSqlite3.}
proc step(para1: Stmt): cint {.importSqlite3.}
proc column_count(stmt: Stmt): int32 {.importSqlite3.}
proc column_blob(stmt: Stmt; col: cint): pointer {.importSqlite3.}
proc column_bytes(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc column_double(stmt: Stmt; col: cint): float64 {.importSqlite3.}
proc column_int64(stmt: Stmt; col: cint): int64 {.importSqlite3.}
proc column_text(stmt: Stmt; col: cint): cstring {.importSqlite3.}
proc column_type(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc finalize(stmt: Stmt): cint {.importSqlite3.}
doAssert libversion_number() == SQLITE_VERSION_NUMBER
proc assertError(facet: Facet; cap: Cap; db: Sqlite3; context: string) =
run(facet) do (turn: Turn):
publish(turn, cap, SqlError(
msg: $errmsg(db),
context: context,
))
proc assertError(facet: Facet; cap: Cap; msg, context: string) =
run(facet) do (turn: Turn):
publish(turn, cap, SqlError(
msg: msg,
context: context,
))
proc extractValue(stmt: Stmt; col: cint): Value =
case column_type(stmt, col)
of SQLITE_INTEGER:
result = toPreserves(column_int64(stmt, col))
of SQLITE_FLOAT:
result = toPreserves(column_double(stmt, col))
of SQLITE_TEXT:
result = Value(kind: pkString, string: newString(column_bytes(stmt, col)))
if result.string.len > 0:
copyMem(addr result.string[0], column_text(stmt, col), result.string.len)
of SQLITE_BLOB:
result = Value(kind: pkByteString, bytes: newSeq[byte](column_bytes(stmt, col)))
if result.bytes.len > 0:
copyMem(addr result.bytes[0], column_blob(stmt, col), result.bytes.len)
else:
result = initRecord("null")
proc extractTuple(stmt: Stmt; arity: cint): Value =
result = initSequence(arity)
for col in 0..<arity: result[col] = extractValue(stmt, col)
proc renderSql(tokens: openarray[Value]): string =
for token in tokens:
if result.len > 0: result.add ' '
case token.kind
of pkSymbol:
result.add token.symbol.string
of pkString:
result.add '\''
result.add token.string
result.add '\''
of pkFloat, pkRegister, pkBigInt:
result.add $token
of pkBoolean:
if token.bool: result.add '1'
else: result.add '0'
else:
return ""
proc spawnSqliteActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
result = spawnActor(turn, "sqlite") do (turn: Turn):
let pat = Resolve?:{ 0: SqliteStep.grabTypeFlat, 1: grab() }
during(turn, relay, pat) do (path: string, observer: Cap):
linkActor(turn, path) do (turn: Turn):
let facet = turn.facet
stderr.writeLine("opening SQLite database ", path)
var db: Sqlite3
if open_v2(path, addr db, SQLITE_OPEN_READONLY, nil) != SQLITE_OK:
discard publish(turn, observer,
Rejected(detail: toPreserves($errmsg(db))))
else:
turn.onStop do (turn: Turn):
close(db)
stderr.writeLine("closed SQLite database ", path)
let ds = turn.newDataspace()
discard publish(turn, observer,
ResolvedAccepted(responderSession: ds))
during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
var
stmt: Stmt
text = renderSql statement
if text == "":
assertError(facet, target, "invalid statement", $statement)
elif prepare_v2(db, text, text.len.cint, addr stmt, nil) != SQLITE_OK:
assertError(facet, target, db, text)
else:
try:
let arity = column_count(stmt)
var res = step(stmt)
while res == SQLITE_ROW:
var rec = extractTuple(stmt, arity)
discard publish(turn, target, rec)
res = step(stmt)
assert res != 100
if res != SQLITE_DONE:
assertError(facet, target, db, text)
finally:
if finalize(stmt) != SQLITE_OK: assertError(facet, target, db, text)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnSqliteActor(turn, relay)

View File

@ -1,30 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## Syndicate multitool.
import syndicate, syndicate/relays, syndicate/drivers/timers
import ./syndesizer/[
base64_decoder,
cache_actor,
file_systems,
file_system_usage,
http_driver,
json_socket_translator,
json_translator,
pulses,
xml_translator]
runActor("syndesizer") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
discard spawnTimerDriver(turn, relay)
discard spawnBase64Decoder(turn, relay)
discard spawnCacheActor(turn, relay)
discard spawnFileSystemActor(turn, relay)
discard spawnFileSystemUsageActor(turn, relay)
discard spawnHttpDriver(turn, relay)
discard spawnJsonSocketTranslator(turn, relay)
discard spawnJsonStdioTranslator(turn, relay)
discard spawnPulseActor(turn, relay)
discard spawnXmlTranslator(turn, relay)

View File

@ -1,3 +0,0 @@
include_rules
: foreach *.nim |> !nim_bin |> {bin}
: foreach {bin} |> !assert_built |>

View File

@ -1,50 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[base64, os],
pkg/nimcrypto/blake2,
preserves, preserves/sugar, syndicate,
../schema/config,
../schema/base64 as schema
export Base64DecoderArguments
export schema
proc spawnBase64Decoder*(turn: Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "base64-decoder") do (turn: Turn):
let tmpDir = getTempDir()
during(turn, root, ?:Base64DecoderArguments) do (ds: Cap):
let decTextPat = observePattern(!Base64Text, { @[%0]: grabLit() })
during(turn, ds, decTextPat) do (txt: string):
discard publish(turn, ds, Base64Text(
txt: txt,
bin: cast[seq[byte]](decode(txt)),
))
let encTextPat = observePattern(!Base64Text, { @[%1]: grabLit() })
during(turn, ds, encTextPat) do (bin: seq[byte]):
discard publish(turn, ds, Base64Text(
txt: encode(bin),
bin: bin,
))
let decFilePat = observePattern(!Base64File, { @[%0]: grabLit() })
during(turn, ds, decFilePat) do (txt: string):
var
bin = decode(txt)
digest = $blake2_256.digest(bin)
path = tmpDir / digest
writeFile(path, bin)
discard publish(turn, ds, Base64File(
txt: txt,
path: path,
size: bin.len,
))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
spawnBase64Decoder(turn, ds)

View File

@ -1,58 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/times
import preserves, syndicate,
syndicate/durings,
syndicate/drivers/timers
import ../schema/config
proc afterTimeout(n: float64): LaterThan =
## Get a `LaterThan` record for `n` seconds in the future.
result.seconds = getTime().toUnixFloat() + n
type CacheEntity {.final.} = ref object of Entity
timeouts, target: Cap
# dataspaces for observing timeouts and publishing values
pattern: Pattern
lifetime: float64
method publish(cache: CacheEntity; turn: Turn; ass: AssertionRef; h: Handle) =
## Re-assert pattern captures in a sub-facet.
discard inFacet(turn) do (turn: Turn):
# TODO: a separate facet for every assertion, too much?
var ass = depattern(cache.pattern, ass.value.sequence)
# Build an assertion from what we have of the pattern and capture.
discard publish(turn, cache.target, ass)
let timeout = afterTimeout(cache.lifetime)
onPublish(turn, cache.timeouts, ?timeout) do:
stop(turn) # end this facet
proc isObserve(pat: Pattern): bool =
pat.orKind == PatternKind.group and
pat.group.type.orKind == GroupTypeKind.rec and
pat.group.type.rec.label.isSymbol"Observe"
proc spawnCacheActor*(turn: Turn; root: Cap): Actor =
spawnActor(turn, "cache_actor") do (turn: Turn):
during(turn, root, ?:CacheArguments) do (ds: Cap, lifetime: float64):
onPublish(turn, ds, ?:Observe) do (pat: Pattern, obs: Cap):
var cache: CacheEntity
if obs.relay != turn.facet and not(pat.isObserve):
# Watch pattern if the observer is not us
# and if the pattern isn't a recursive observe
cache = CacheEntity(
timeouts: root,
target: ds,
pattern: pat,
lifetime: lifetime,
)
discard observe(turn, ds, pat, cache)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
discard spawnTimerDriver(turn, ds)
discard spawnCacheActor(turn, ds)

View File

@ -1,28 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[dirs, os, paths]
import preserves, preserves/sugar
import syndicate
import ../schema/[assertions, config]
proc spawnFileSystemUsageActor*(turn: Turn; root: Cap): Actor {.discardable.} =
spawn("file-system-usage", turn) do (turn: Turn):
during(turn, root, ?:FileSystemUsageArguments) do (ds: Cap):
let pat = observePattern(!FileSystemUsage, { @[%0]: grab() })
during(turn, ds, pat) do (lit: Literal[string]):
var ass = FileSystemUsage(path: lit.value)
if fileExists(ass.path): ass.size = getFileSize(ass.path)
else:
for fp in walkDirRec(paths.Path(lit.value), yieldFilter={pcFile}):
var fs = getFileSize(string fp)
inc(ass.size, fs)
discard publish(turn, ds, ass)
# TODO: updates?
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
discard spawnFileSystemUsageActor(turn, ds)

View File

@ -1,101 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[cmdline, oserrors, posix],
pkg/sys/[files, ioqueue],
pkg/preserves,
pkg/syndicate,
pkg/syndicate/protocols/gatekeeper,
pkg/syndicate/drivers/timers
from std/os import `/`
import ../schema/[config, file_system]
from pkg/sys/handles import FD
proc echo(args: varargs[string, `$`]) {.used.} =
stderr.writeLine(args)
proc stopForOsError(turn: Turn; cap: Cap) =
message(turn, cap, initRecord("error", osLastError().osErrorMsg().toPreserves))
turn.stopFacet()
proc stopAsOkay(turn: Turn; cap: Cap) =
message(turn, cap, initRecord"ok")
turn.stopFacet()
const iounit = 0x1000
type Buffer = ref seq[byte]
proc newBuffer(n: int): Buffer =
new result
if n < 0: result[].setLen iounit
else: result[].setLen min(n, iounit)
proc read(facet: Facet; file: AsyncFile; count: BiggestInt; buf: Buffer; dst: Cap)
proc readAsync(facet: Facet; file: AsyncFile; count: BiggestInt; buf: Buffer; dst: Cap) {.asyncio.} =
# TODO: optimise
assert count != 0
let n = file.read(buf)
proc deliver(turn: Turn) {.closure.} =
case n
of -1:
turn.stopForOsError(dst)
else:
if n < buf[].len:
buf[].setLen(n)
if n > 0:
message(turn, dst, buf[])
turn.stopAsOkay(dst)
else:
message(turn, dst, buf[])
var count = count
if count != -1:
count = count - n
read(facet, file, count, buf, dst)
facet.run(deliver)
proc read(facet: Facet; file: AsyncFile; count: BiggestInt; buf: Buffer; dst: Cap) =
discard trampoline:
whelp readAsync(facet, file, count, buf, dst)
proc read(facet: Facet; file: AsyncFile; count: BiggestInt; dst: Cap) =
## Call read with a reusable buffer.
read(facet, file, count, newBuffer(count.int), dst)
proc serve(turn: Turn; detail: FileSystemDetail; ds: Cap) =
during(turn, ds, Read.grabType) do (op: Read):
let dst = op.sink.Cap
let fd = posix.open(detail.root / op.path, O_RDONLY or O_NONBLOCK, 0)
if fd < 0:
turn.stopForOsError(dst)
else:
if op.count == 0:
discard close(fd)
message(turn, dst, initRecord"ok")
turn.facet.stop()
elif posix.lseek(fd, op.offset, SEEK_SET) < 0:
discard close(fd)
turn.stopForOsError(dst)
else:
# fd is hopefully automagically closed.
turn.facet.read(fd.FD.newAsyncFile, op.count, dst)
proc spawnFileSystemActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
spawnActor(turn, "file-system") do (turn: Turn):
let resolvePat = Resolve?:{ 0: FileSystemStep.grabWithin, 1: grab() }
during(turn, relay, resolvePat) do (detail: FileSystemDetail; observer: Cap):
let ds = turn.newDataspace()
serve(turn, detail, ds)
discard publish(turn, observer, ResolvedAccepted(responderSession: ds))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnFileSystemActor(turn, relay)
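A hypothetical client sketch: here `ds` is the responderSession dataspace obtained above and `sink` a Cap wrapping an entity prepared to receive the byte-string messages followed by a final <ok> or <error …> record; `count: -1` requests a read to end-of-file:

```
import preserves, syndicate
import ./schema/file_system

proc readWholeFile(turn: Turn; ds, sink: Cap) =
  # "motd" is resolved against the root configured in FileSystemDetail.
  discard publish(turn, ds, Read(path: "motd", offset: 0, count: -1, sink: sink))
```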

View File

@ -1,53 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## Thin wrapper over `syndicate/drivers/http_driver`.
import
pkg/taps,
pkg/preserves,
pkg/syndicate,
pkg/syndicate/drivers/http_driver,
pkg/syndicate/protocols/[gatekeeper, sturdy],
../schema/config
proc spawnHttpDriver*(turn: Turn; relay: Cap): Actor {.discardable.} =
## Create a dataspace for the driver and do the gatekeeper dance.
spawnActor(turn, "http-driver") do (turn: Turn):
let pat = Resolve?:{ 0: HttpDriverStep.matchType }
during(turn, relay, pat):
let ds = turn.newDataspace()
http_driver.spawnHttpDriver(turn, ds)
# Spawn a shared driver.
let pat = Resolve?:{ 0: HttpDriverStep.matchType, 1: grab() }
during(turn, relay, pat) do (obs: Cap):
discard publish(turn, obs, ResolvedAccepted(responderSession: ds))
# Pass the shared driver dataspace.
when isMainModule:
import syndicate/relays
when defined(solo5):
import solo5
acquireDevices([("eth0", netBasic)], netAcquireHook)
proc envRoute: Route =
var pr = parsePreserves $solo5_start_info.cmdline
if result.fromPreserves pr:
return
elif pr.isSequence:
for e in pr:
if result.fromPreserves e:
return
quit("failed to parse command line for route to Syndicate gatekeeper")
runActor("main") do (turn: Turn):
let relay = newDataspace(turn)
spawnRelays(turn, relay)
resolve(turn, relay, envRoute(), spawnHttpDriver)
else:
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnHttpDriver(turn, relay)

View File

@ -1,2 +0,0 @@
define:ipv6Enabled
include:"std/assertions"

View File

@ -1,82 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[json, options],
pkg/sys/[ioqueue, sockets],
pkg/preserves, pkg/preserves/jsonhooks,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
../schema/[config, json_messages]
template translateSocketBody {.dirty.} =
# Template workaround for CPS and parameterized types.
var
guard = initGuard(facet)
dec = newBufferedDecoder(0)
buf = new string #TODO: get a pointer into the decoder
alive = true
proc kill(turn: Turn) =
alive = false
proc setup(turn: Turn) =
# Closure, not CPS.
onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
if alive:
discard trampoline:
whelp write(socket[], $data & "\n")
else:
stderr.writeLine "dropped send of ", data
discard publish(turn, observer, ResolvedAccepted(responderSession: ds))
# Resolve the <json-socket-translator { }> step.
onStop(facet, kill)
run(facet, setup)
while alive:
# TODO: parse buffer
buf[].setLen(0x4000)
let n = read(socket[], buf)
if n < 1:
stderr.writeLine "socket read returned ", n
else:
buf[].setLen(n)
dec.feed(buf[])
var data = dec.parse()
if data.isSome:
proc send(turn: Turn) =
# Closure, not CPS.
message(turn, ds, initRecord("recv", data.get))
run(facet, send)
stderr.writeLine "close socket ", sa
close(socket[])
proc translateSocket(facet: Facet; sa: TcpAddress; ds, observer: Cap) {.asyncio.} =
var
socket = new AsyncConn[Protocol.Tcp]
conn = connectTcpAsync(sa.host, Port sa.port)
socket[] = conn
translateSocketBody()
proc translateSocket(facet: Facet; sa: UnixAddress; ds, observer: Cap) {.asyncio.} =
var
socket = new AsyncConn[Protocol.Unix]
conn = connectUnixAsync(sa.path)
socket[] = conn
translateSocketBody()
proc spawnJsonSocketTranslator*(turn: Turn; relay: Cap): Actor {.discardable.} =
let pat = Resolve?:{ 0: JsonSocketTranslatorStep.grabTypeFlat, 1: grab() }
spawnActor(turn, "json-socket-translator") do (turn: Turn):
during(turn, relay, pat) do (sa: TcpAddress, observer: Cap):
linkActor(turn, "json-socket-translator") do (turn: Turn):
let ds = turn.newDataspace()
discard trampoline:
whelp translateSocket(turn.facet, sa, ds, observer)
during(turn, relay, pat) do (sa: UnixAddress, observer: Cap):
linkActor(turn, "json-socket-translator") do (turn: Turn):
let ds = turn.newDataspace()
discard trampoline:
whelp translateSocket(turn.facet, sa, ds, observer)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnJsonSocketTranslator(turn, relay)
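Once resolved, the responder dataspace speaks <send …> and <recv …>. A hypothetical sketch that sends a single JSON line over the socket (the mpv-style command is purely illustrative):

```
import std/json
import preserves, syndicate
import ./schema/json_messages

proc pausePlayback(turn: Turn; ds: Cap) =
  message(turn, ds, SendJson(data: %*{"command": ["set_property", "pause", true]}))
```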

View File

@ -1,33 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[json, osproc]
import preserves
import syndicate
import ../schema/[config, json_messages]
proc runChild(params: seq[string]): string =
if params.len < 1:
quit "not enough parameters"
let
cmd = params[0]
args = params[1..params.high]
try: result = execProcess(command=cmd, args=args, options={poUsePath})
except CatchableError as err:
quit("execProcess failed: " & err.msg)
if result == "":
stderr.writeLine "no output"
proc spawnJsonStdioTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "json-stdio-translator") do (turn: Turn):
during(turn, root, ?:JsonTranslatorArguments) do (argv: seq[string], ds: Cap):
var js = parseJson(runChild(argv))
message(turn, ds, RecvJson(data: js))
discard publish(turn, ds, RecvJson(data: js))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
spawnJsonStdioTranslator(turn, ds)

View File

@ -1,143 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[options, random, tables, times],
pkg/preserves, preserves/sugar,
pkg/syndicate,
pkg/syndicate/protocols/gatekeeper,
pkg/syndicate/drivers/timers
import ../schema/[assertions, config]
type
ProxyEntity {.final.} = ref object of Entity
## An entity that asserts and retracts observers on a pulse.
self, target: Cap
hold: Table[Handle, Forward]
isActive: bool
Forward = tuple
ass: Value
hand: Handle
proc flipOn(proxy: ProxyEntity; turn: Turn) =
assert proxy.isActive == false
proxy.isActive = true
for fwd in proxy.hold.mvalues:
assert fwd.hand == 0.Handle
fwd.hand = publish(turn, proxy.target, fwd.ass)
proc flipOff(proxy: ProxyEntity; turn: Turn) =
if proxy.isActive:
proxy.isActive = false
for fwd in proxy.hold.mvalues:
assert fwd.hand != 0.Handle
retract(turn, fwd.hand)
fwd.hand = 0.Handle
method publish(proxy: ProxyEntity; turn: Turn; ass: AssertionRef; h: Handle) =
var fwd: Forward
fwd.ass = ass.value
if proxy.isActive:
fwd.hand = publish(turn, proxy.target, fwd.ass)
proxy.hold[h] = fwd
method retract(proxy: ProxyEntity; turn: Turn; h: Handle) =
var fwd: Forward
if proxy.hold.pop(h, fwd):
if fwd.hand > 0:
retract(turn, fwd.hand)
method message(proxy: ProxyEntity; turn: Turn; v: AssertionRef) =
## Messages passthru.
message(turn, proxy.target, v.value)
method sync(proxy: ProxyEntity; turn: Turn; peer: Cap) =
## Sync passthru.
sync(turn, proxy.target, peer)
type
PulseEntity {.final.} = ref object of Entity
self, driver: Cap
proxy: ProxyEntity
detail: PulseDetail
timerHandle: Handle
proc scheduleFlipOn(pulse: PulseEntity; turn: Turn) =
var period: float
while period <= 0.0:
period = gauss(mu = pulse.detail.interval, sigma = pulse.detail.dither)
replace(turn, pulse.driver, pulse.timerHandle, SetTimer(
label: true.toPreserves,
seconds: period,
kind: TimerKind.relative,
peer: pulse.self.embed,
))
proc scheduleFlipOff(pulse: PulseEntity; turn: Turn) =
replace(turn, pulse.driver, pulse.timerHandle, SetTimer(
label: false.toPreserves,
seconds: pulse.detail.period,
kind: TimerKind.relative,
peer: pulse.self.embed,
))
method message(pulse: PulseEntity; turn: Turn; v: AssertionRef) =
var exp: TimerExpired
if exp.fromPreserves(v.value):
if exp.label.isFalse:
pulse.scheduleFlipOn(turn)
pulse.proxy.flipOff(turn)
else:
pulse.scheduleFlipOff(turn)
pulse.proxy.flipOn(turn)
proc stop(pulse: PulseEntity, turn: Turn) =
if pulse.proxy.isActive:
pulse.proxy.flipOff(turn)
retract(turn, pulse.timerHandle)
# TODO: is this automatic?
proc newPulseEntity(turn: Turn; detail: PulseDetail; timerDriver: Cap): PulseEntity =
if not (detail.target of Cap):
raise newException(ValueError, "pulse target is not an embedded Cap")
result = PulseEntity(
facet: turn.facet,
driver: timerDriver,
detail: detail,
proxy: ProxyEntity(
facet: turn.facet,
target: detail.target.Cap,
)
)
result.proxy.self = newCap(turn, result.proxy)
result.self = newCap(turn, result)
proc spawnPulseActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
spawnActor(turn, "pulse") do (turn: Turn):
let timerDriver = turn.newDataspace()
spawnTimerDriver(turn, timerDriver)
let resolvePat = Resolve?:{ 0: PulseStep.grabWithin, 1: grab() }
during(turn, relay, resolvePat) do (detail: PulseDetail; observer: Cap):
var pulse: PulseEntity
if detail.period < 0.000_0001 or
detail.interval < detail.period or
detail.interval < detail.dither:
var r = Resolved(orKind: ResolvedKind.Rejected)
r.rejected.detail = "invalid pulse parameters".toPreserves
discard publish(turn, observer, r)
else:
randomize()
pulse = turn.newPulseEntity(detail, timerDriver)
discard publish(turn, observer, ResolvedAccepted(
responderSession: pulse.proxy.self))
pulse.scheduleFlipOn(turn)
do:
if not pulse.isNil:
pulse.stop(turn)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnPulseActor(turn, relay)
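The validation above pins down how the PulseDetail fields relate: `period` is how long the proxy stays on, `interval` is the mean gaussian pause before re-arming, and `dither` its deviation, with `interval` required to cover both. A sketch, where `targetCap` is a hypothetical Cap to forward assertions to:

```
# on for 1 s, re-armed after a gaussian ~10 s pause with 0.5 s of jitter
let detail = PulseDetail(
  period: 1.0, interval: 10.0, dither: 0.5, target: targetCap)
```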

View File

@ -1,34 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[options, parsexml, xmlparser, xmltree]
import preserves, preserves/sugar, preserves/xmlhooks
import syndicate
import ../schema/[assertions, config]
proc translateXml(s: string): XmlTranslation =
result.xml = s
try: result.pr = result.xml.parseXml({allowUnquotedAttribs}).toPreservesHook
except XmlError: discard
proc translatePreserves(pr: Value): XmlTranslation {.gcsafe.} =
result.pr = pr
var xn = result.pr.preservesTo(XmlNode)
if xn.isSome: result.xml = $get(xn)
proc spawnXmlTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "xml-translator") do (turn: Turn):
during(turn, root, ?:XmlTranslatorArguments) do (ds: Cap):
let xmlPat = observePattern(!XmlTranslation, {@[%0]:grab()})
during(turn, ds, xmlPat) do (xs: Literal[string]):
publish(turn, ds, translateXml(xs.value))
let prPat = observePattern(!XmlTranslation, {@[%1]:grab()})
during(turn, ds, prPat) do (pr: Literal[Value]):
publish(turn, ds, translatePreserves(pr.value))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
spawnXmlTranslator(turn, ds)
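Both translation procs are usable outside the actor; a minimal sketch (the Preserves shape comes from preserves/xmlhooks):

```
let t = translateXml """<greeting lang="en">hi</greeting>"""
echo t.pr    # the element as a Preserves value, via toPreservesHook
```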

View File

@ -1,64 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[os, tables]
import preserves, syndicate, syndicate/[durings, relays]
proc parsePattern(pr: Value): Pattern =
let
dropSigil = initRecord("lit", "_".toSymbol)
grabSigil = initRecord("lit", "?".toSymbol)
var pr = drop(pr).toPreserves
apply(pr) do (pr: var Value):
if pr == dropSigil:
pr = initRecord("_")
elif pr == grabSigil:
pr = initRecord("bind", initRecord("_"))
doAssert result.fromPreserves(pr)
proc inputPatterns: seq[Pattern] =
var args = commandLineParams()
result.setLen(args.len)
for i, input in args:
try: result[i] = input.parsePreserves.parsePattern
except ValueError:
quit "failed to parse Preserves argument"
type DumpEntity {.final.} = ref object of Entity
assertions: Table[Handle, seq[Value]]
proc toLine(values: seq[Value]; prefix: char): string =
result = newStringOfCap(1024)
let sep = getEnv("FS", " ")
result.add(prefix)
for v in values:
add(result, sep)
add(result, $v)
add(result, '\n')
method publish(dump: DumpEntity; turn: Turn; ass: AssertionRef; h: Handle) =
var values = ass.value.sequence
stdout.write(values.toLine('+'))
stdout.flushFile()
dump.assertions[h] = values
method retract(dump: DumpEntity; turn: Turn; h: Handle) =
var values: seq[Value]
if dump.assertions.pop(h, values):
stdout.write(values.toLine('-'))
stdout.flushFile()
method message*(dump: DumpEntity; turn: Turn; ass: AssertionRef) =
stdout.write(ass.value.sequence.toLine('!'))
stdout.flushFile()
proc main =
let
patterns = inputPatterns()
entity = DumpEntity()
runActor("syndex_card") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; peer: Cap):
for pat in patterns:
discard observe(turn, peer, pat, entity)
main()
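As the sigil rewriting above implies, a `?` in an argument becomes a capture and `_` a wildcard; a sketch against the <rtt …> schema earlier in this diff:

```
# observes every four-field <rtt …> record, capturing only the address
let pat = parsePattern(parsePreserves "<rtt ? _ _ _>")
```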

View File

@ -1,211 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[os, strutils]
import preserves, preserves/sugar, syndicate
import ./schema/[assertions, config]
{.passC: staticExec"$PKG_CONFIG --cflags libxslt".}
{.passL: staticExec"$PKG_CONFIG --libs libxslt".}
{.pragma: libxslt, header: "libxslt/xslt.h", importc.}
type
xmlElementType {.libxslt.} = enum
XML_ELEMENT_NODE = 1,
XML_ATTRIBUTE_NODE = 2,
XML_TEXT_NODE = 3,
XML_CDATA_SECTION_NODE = 4,
XML_ENTITY_REF_NODE = 5,
XML_ENTITY_NODE = 6,
XML_PI_NODE = 7,
XML_COMMENT_NODE = 8,
XML_DOCUMENT_NODE = 9,
XML_DOCUMENT_TYPE_NODE = 10,
XML_DOCUMENT_FRAG_NODE = 11,
XML_NOTATION_NODE = 12,
XML_HTML_DOCUMENT_NODE = 13,
XML_DTD_NODE = 14,
XML_ELEMENT_DECL = 15,
XML_ATTRIBUTE_DECL = 16,
XML_ENTITY_DECL = 17,
XML_NAMESPACE_DECL = 18,
XML_XINCLUDE_START = 19,
XML_XINCLUDE_END = 20
xmlNsPtr = ptr xmlNs
xmlNs {.libxslt.} = object
next: xmlNsPtr
href, prefix: cstring
xmlAttrPtr = ptr xmlAttr
xmlAttr {.libxslt.} = object
name: cstring
next: xmlAttrPtr
children: xmlNodePtr
xmlElementContentPtr = ptr xmlElementContent
xmlElementContent {.libxslt.} = object
encoding: cstring
xmlNodePtr = ptr xmlNode
xmlNode {.libxslt.} = object
`type`: xmlElementType
name: cstring
children, next: xmlNodePtr
content: cstring
properties: xmlAttrPtr
nsDef: xmlNsPtr
xmlDocPtr {.libxslt.} = distinct pointer
xsltStylesheetPtr {.libxslt.} = distinct pointer
proc isNil(x: xmlDocPtr): bool {.borrow.}
proc isNil(x: xsltStylesheetPtr): bool {.borrow.}
proc xmlReadMemory(buf: pointer; len: cint; url, enc: cstring; opts: cint): xmlDocPtr {.libxslt.}
proc xmlReadMemory(buf: string; uri = "noname.xml"): xmlDocPtr =
xmlReadMemory(buf[0].addr, buf.len.cint, uri, "UTF-8", 0)
proc xmlParseFile(filename: cstring): xmlDocPtr {.libxslt.}
proc xmlFreeDoc(p: xmlDocPtr) {.libxslt.}
proc xmlDocGetRootElement(doc: xmlDocPtr): xmlNodePtr {.libxslt.}
proc loadXmlDoc(text: string): xmlDocPtr =
if text.startsWith("/") and fileExists(text):
xmlParseFile(text)
else:
xmlReadMemory(text, "noname.xml")
proc xsltParseStylesheetFile(filename: cstring): xsltStylesheetPtr {.libxslt.}
proc xsltParseStylesheetDoc(doc: xmlDocPtr): xsltStylesheetPtr {.libxslt.}
proc xsltParseStylesheetDoc(text: string; uri = "noname.xml"): xsltStylesheetPtr =
var doc = xmlReadMemory(text, uri)
result = xsltParseStylesheetDoc(doc)
# implicit free of doc
proc loadStylesheet(text: string): xsltStylesheetPtr =
if text.startsWith("/") and fileExists(text):
xsltParseStylesheetFile(text)
else:
xsltParseStylesheetDoc(text, "noname.xsl")
proc xsltApplyStylesheet(
style: xsltStylesheetPtr, doc: xmlDocPtr, params: cstringArray): xmlDocPtr {.libxslt.}
proc xsltFreeStylesheet(style: xsltStylesheetPtr) {.libxslt.}
proc xsltSaveResultToString(txt: ptr pointer; len: ptr cint; res: xmlDocPtr; style: xsltStylesheetPtr): cint {.libxslt.}
proc c_free*(p: pointer) {.importc: "free", header: "<stdlib.h>".}
proc xsltSaveResultToString(res: xmlDocPtr; style: xsltStylesheetPtr): string =
var
txt: pointer
len: cint
if xsltSaveResultToString(addr txt, addr len, res, style) < 0:
raise newException(CatchableError, "xsltSaveResultToString failed")
if len > 0:
result = newString(int len)
copyMem(result[0].addr, txt, len)
c_free(txt)
proc initLibXml =
discard
proc XML_GET_CONTENT(xn: xmlNodePtr): xmlElementContentPtr {.libxslt.}
proc textContent(xn: xmlNodePtr): string =
if xn.content != nil: result = $xn.content
proc content(attr: xmlAttrPtr): string =
var child = attr.children
while not child.isNil:
result.add child.content
child = child.next
proc preserveSiblings(result: var seq[Value]; first: xmlNodePtr) =
var xn = first
while not xn.isNil:
case xn.type
of XML_ELEMENT_NODE:
var child = Value(kind: pkRecord)
if not xn.nsDef.isNil:
child.record.add initDictionary()
var ns = xn.nsDef
while not ns.isNil:
if not ns.href.isNil:
var key = Value(kind: pkString)
if ns.prefix.isNil:
key.string = "xmlns"
else:
key.string = "xmlns:" & $ns.prefix
child.record[0][key] = toPreserves($ns.href)
ns = ns.next
if not xn.properties.isNil:
if child.record.len < 1:
child.record.add initDictionary()
var attr = xn.properties
while not attr.isNil:
var
key = toPreserves($attr.name)
val = toPreserves(attr.content)
child.record[0][key] = val
attr = attr.next
if not xn.children.isNil:
preserveSiblings(child.record, xn.children)
child.record.add toSymbol($xn.name)
result.add child
of XML_TEXT_NODE:
result.add textContent(xn).toPreserves
else:
stderr.writeLine "not an XML_ELEMENT_NODE - ", $xn.type
xn = xn.next
proc toPreservesHook*(xn: xmlNodePtr): Value =
var items = newSeqofCap[Value](1)
preserveSiblings(items, xn)
items[0]
proc spawnXsltActor*(turn: Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "xslt") do (turn: Turn):
initLibXml()
during(turn, root, ?:XsltArguments) do (ds: Cap):
let sheetsPat = observePattern(!XsltTransform, {@[%0]: grab(), @[%1]: grab()})
during(turn, ds, sheetsPat) do (stylesheet: Literal[string], input: Literal[string]):
let cur = loadStylesheet(stylesheet.value)
if cur.isNil:
stderr.writeLine "failed to parse stylesheet"
else:
let doc = loadXmlDoc(input.value)
if doc.isNil:
stderr.writeLine "failed to parse input document"
else:
let
params = allocCStringArray([])
res = xsltApplyStylesheet(cur, doc, params)
deallocCStringArray(params) # free in either case, not only on success
if res.isNil:
stderr.writeLine "failed to apply stylesheet transformation"
else:
let output = xsltSaveResultToString(res, cur)
publish(turn, ds, XsltTransform(
stylesheet: stylesheet.value,
input: input.value,
output: xmlDocGetRootElement(res).toPreservesHook,
))
xmlFreeDoc(res)
xmlFreeDoc(doc)
xsltFreeStylesheet(cur)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
spawnXsltActor(turn, ds)

View File

@ -1,61 +1,13 @@
# Emulate a Nimble manifest from the CycloneDX data in sbom.json.
# Package
import std/json
version = "20230410"
author = "Emery Hemingway"
description = "Utilites for Syndicated Actors and Synit"
license = "unlicense"
srcDir = "src"
bin = @["http_translator", "json_socket_translator", "json_translator", "msg"]
proc lookupComponent(sbom: JsonNode; bomRef: string): JsonNode =
for c in sbom{"components"}.getElems.items:
if c{"bom-ref"}.getStr == bomRef:
return c
result = newJNull()
let
sbom = (getPkgDir() & "/sbom.json").readFile.parseJson
comp = sbom{"metadata", "component"}
bomRef = comp{"bom-ref"}.getStr
# Dependencies
version = comp{"version"}.getStr
author = comp{"authors"}[0]{"name"}.getStr
description = comp{"description"}.getStr
license = comp{"licenses"}[0]{"license", "id"}.getStr
for prop in comp{"properties"}.getElems.items:
let (key, val) = (prop{"name"}.getStr, prop{"value"}.getStr)
case key
of "nim:skipDirs:":
add(skipDirs, val)
of "nim:skipFiles:":
add(skipFiles, val)
of "nim:skipExt":
add(skipExt, val)
of "nim:installDirs":
add(installDirs, val)
of "nim:installFiles":
add(installFiles, val)
of "nim:installExt":
add(installExt, val)
of "nim:binDir":
add(binDir, val)
of "nim:srcDir":
add(srcDir, val)
of "nim:backend":
add(backend, val)
else:
if key.startsWith "nim:bin:":
namedBin[key[8..key.high]] = val
for depend in sbom{"dependencies"}.items:
if depend{"ref"}.getStr == bomRef:
for depRef in depend{"dependsOn"}.items:
let dep = sbom.lookupComponent(depRef.getStr)
var spec = dep{"name"}.getStr
for extRef in dep{"externalReferences"}.elems:
if extRef{"type"}.getStr == "vcs":
spec = extRef{"url"}.getStr
break
let ver = dep{"version"}.getStr
if ver != "":
if ver.allCharsInSet {'0'..'9', '.'}: spec.add " == "
else: spec.add '#'
spec.add ver
requires spec
break
requires "nim >= 1.6.6", "syndicate >= 0.3.1"