Compare commits

..

5 Commits

51 changed files with 1618 additions and 2573 deletions

README.md

@@ -4,6 +4,8 @@
A Syndicate multitool that includes a number of different actors that become active via configuration.
Think of it as a Busybox for Syndicate, if Busybox was created before POSIX.
Whether you use a single instance for many protocols or many specialized instances is up to you.
### Cache
@@ -26,7 +28,6 @@ Example configuration:
$cap <cache { dataspace: $nixspace lifetime: 3600.0 }> ]
]
```
### File System Usage
Summarize the size of a file-system directory. Equivalent to `du -s -b`.
@@ -45,50 +46,11 @@ Query the size of a directory in bytes by observing `<file-system-usage "/SOME/P
]
```
### HTTP driver
Experimental HTTP server that services requests using [some version](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/commit/9864ce0ec86fb2f916c2aab318a1e6994ab8834c/schemas/http.prs) of the http Syndicate protocol schema.
```
# Configuration example
let ?not-found = dataspace
$not-found ? <request _ ?res> [
$res ! <status 503 "Service unavailable">
$res ! <done "No binding here.">
]
let ?greeting = dataspace
$greeting ? <request _ ?res> [
$res ! <status 200 "ok">
$res ! <chunk "Hello world">
$res ! <done "!">
]
let ?http = dataspace
$http [
<http-bind #f 80 get [ ] $not-found>
<http-bind #f 80 get [|...|] $not-found>
<http-bind #f 80 get ["hello"] $greeting>
]
? <service-object <daemon http-driver> ?cap> [
$cap <http-driver { dataspace: $http }>
]
<daemon http-driver {
argv: [ "/bin/syndesizer" ]
clearEnv: #t
protocol: application/syndicate
}>
<require-service <daemon http-driver>>
```
### JSON Socket Translator
Communicate with sockets that send and receive lines of JSON using `<send …>` and `<recv …>` messages.
Responds to the gatekeeper step `<resolve <json-socket-translator { socket: <unix "…"> / <tcp "…" …> }> $resolver>`.
Do not send messages into the dataspace configured with `<json-socket-translator …>` until `<connected @socketPath string>` is asserted.
```
# MPV configuration example
@@ -106,11 +68,25 @@ Responds to the gatekeeper step `<json-socket-translator { socket: <unix "…">
protocol: none
}>
let ?resolver = dataspace
$resolver ? <accepted ?mpvSpace> $mpvSpace [
let ?mpvSpace = dataspace
? <service-state <daemon mpv-server> ready> [
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
$cap <json-socket-translator {
dataspace: $mpvSpace
socket: "/run/user/1000/mpv.sock"
}>
]
]
$mpvSpace [
# announce the dataspace when the translator is connected
$config <mpv $mpvSpace>
$config <bind <ref { oid: "mpv" key: #x"" }> $mpvSpace #f>
? <connected $socketPath> [
$config <mpv $mpvSpace>
$config <bind <ref { oid: "mpv" key: #x"" }> $mpvSpace #f>
]
# translate <play-file ?file> to an MPV command
?? <play-file ?file> [
@@ -122,15 +98,6 @@ $resolver ? <accepted ?mpvSpace> $mpvSpace [
! <send { "command": ["playlist-clear"] }>
]
]
? <service-state <daemon mpv-server> ready> [
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
$cap <resolve <json-socket-translator {
socket: <unix "/run/user/1000/mpv.sock">
}> $resolver>
]
]
```
### JSON Stdio Translator
@@ -156,43 +123,57 @@ let ?ds = dataspace
]
```
### Pulse proxy
### PostgreSQL
An actor that produces proxies that accept assertions but only forward them during a pulse window.
This can be used to implement polling behavior or periodic service scheduling.
Readonly access to PostgreSQL databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.
Can be disabled by passing `--define:withPostgre=no` to the Nim compiler.
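For example, a sketch of such a build (the `src/syndesizer.nim` module path is an assumption based on this repository's layout):
```sh
# Compile the multitool without PostgreSQL support.
nim compile --define:withPostgre=no src/syndesizer.nim
```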
```
#!/usr/bin/env -S syndicate-server --control --config
let ?destination = dataspace
$destination ? ?x [
$log ! <log "destination" { +: $x }>
?- $log ! <log "destination" { -: $x }>
]
? <pulsator ?pulsator> [
$log ! <log "pulsator" { line: $pulsator }>
$pulsator <greeting "hello world">
]
# Configuration example
<require-service <daemon syndesizer>>
let ?resolver = <* $config [<rewrite <accepted ?cap> <pulsator $cap>>]>
let ?sqlspace = dataspace
? <service-object <daemon syndesizer> ?cap> [
$log ! <log "service-object" { line: $cap }>
$cap <resolve <pulse {
target: $destination
interval: 4.0 # Interval between pulses.
period: 1.0 # Duration of pulse window.
dither: 2.0 # Gaussian deviation applied to each interval.
}> $resolver>
$cap <postgre {
dataspace: $sqlspace
connection: [
["host" "example.com"]
["dbname" "foobar"]
["user" "hackme"]
]
}>
]
<daemon syndesizer {
argv: [ "/bin/syndesizer" ]
clearEnv: #t
protocol: application/syndicate
}>
let ?tuplespace = dataspace
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
$tuplespace ? [?id ?name] [
$log ! <log "-" { row: <example-row $id $name> }>
]
```
### Pulse proxy
A proxy actor that passes assertions and messages to a configured capability but only asserts observations on a periodic pulse.
This can be used to implement polling behavior.
```
# Example config
let ?ds = dataspace
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
$cap <pulse {dataspace: $ds}>
]
$ds ? <pulse 3600.0 ?proxy> [
$proxy ? <assertion-updated-hourly ?value> [
$log ! <log "-" {assertion-updated-hourly: $value}>
]
]
```
### SQLite
@@ -218,16 +199,56 @@ let ?tuplespace = dataspace
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
$tuplespace [
? [?id ?name] [
$log ! <log "-" { row: <example-row $id $name> }>
]
? <sqlite-error ?msg ?ctx> [
$log ! <log "-" { msg: $msg ctx: $ctx }>
]
$tuplespace ? [?id ?name] [
$log ! <log "-" { row: <example-row $id $name> }>
]
```
### Webhooks
Listens for webhook requests and sends request data to a dataspace as messages.
Request data is formatted according to the http schema [defined in syndicate-protocols](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/branch/main/schemas/http.prs), with the exception that message bodies may be **bytes**, **string**, or **any** for the `content-type`s of `application/octet-stream`, `text/*`, and `application/json` respectively.
```
# Configuration example
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
$cap <webhooks {
listen: <tcp "0.0.0.0" 1048>
endpoints: {
# http://0.0.0.0:1048/my-endpoint
["my-endpoint"]: $target-dataspace
# http://0.0.0.0:1048/some/multi-element/path
["some", "multi-element", "path"]: $target-dataspace
}
}>
]
```
### Websockets
Connects to a websocket endpoint. During the lifetime of the connection a `<connected $URL>` assertion is made. Messages received from the server are sent to the dataspace wrapped in `<recv …>` records, and messages observed as `<send …>` are sent to the server.
```
# Configuration example
<require-service <daemon syndesizer>>
let ?websocketspace = dataspace
? <service-object <daemon syndesizer> ?cap> [
$cap <websocket {
dataspace: $websocketspace
url: "ws://127.0.0.1:5225/"
}>
]
$websocketspace ? <connected $websocketUrl> [
<bind <ref { oid: "websocket" key: #x"" }> $websocketspace #f>
]
```
### XML translator
Translates between Preserves and XML according to the [Conventions for Common Data Types](https://preserves.dev/conventions.html).
@@ -249,92 +270,30 @@ Examples:
]
```
---
### XSLT processor
## esc-printer-driver
Perform XML stylesheet transformations. For a given textual XSLT stylesheet and a textual XML document generate an abstract XML document in Preserves form. Inputs may be XML text or paths to XML files.
A basic [ESC/P](https://en.wikipedia.org/wiki/ESC/P) printer driver.
Takes a path to a printer device file as a command line argument.
The driver speaks the gatekeeper protocol and responds to the `<print {}>` step with a capability that prints strings it receives as messages.
While `<bold>` or `<italic>` is asserted to this entity the printer goes into the corresponding font mode (if the printer supports it).
Sample Syndicate server script:
```
<require-service <daemon printer>>
let ?printer-resolver = dataspace
$printer-resolver ? <accepted ?printer> [
$printer <italic>
$printer ! "printer resolved\r\n"
]
? <service-object <daemon printer> ?cap> [
$cap <resolve <printer {}> $printer-resolver>
$log ! <log "-" { line: "printer started"}>
]
<daemon printer {
argv: [ "/bin/esc-printer-driver" "/dev/usb/lp0"]
protocol: application/syndicate
clearEnv: #t
}>
```
## http-client
The inverse of `http-driver`.
### Caveats
- HTTPS is assumed unless the request is to port 80.
- If the request or response sets `Content-Type` to `application/json` or `…/preserves`
the body will be a parsed Preserves value.
- No caching or proxying.
- Internal errors propagate using a `400 Internal client error` response.
Sample Syndicate server script:
```
#!/usr/bin/env -S syndicate-server --control --config
# A dataspace for handling the HTTP response.
let ?response-handler = dataspace
$response-handler [
?? <done { "code": "EUR" "exchange_middle": ?middle } > [
$log ! <log "-" { line: <exchange EUR RSD $middle> }>
$control <exit 0>
# Configuration example
let ?ds = dataspace
$ds [
? <xslt-transform "/stylesheet.xls" "/doc.xml" ?output> [
? <xml-translation ?text $output> [
$log ! <log "-" { xslt-output: $text }>
]
]
]
# A dataspace for collecting a session capability from the http-client.
let ?client-resolver = dataspace
$client-resolver ? <accepted ?client> $client [
<request
# Request Dinar to Euro exchange rate.
<http-request 0 "kurs.resenje.org" 443
get ["api" "v1" "currencies" "eur" "rates" "today"]
{content-type: "application/json"} {} #f
>
$response-handler
>
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> $cap [
<xml-translator { dataspace: $ds }>
<xslt { dataspace: $ds }>
]
# Pass the resolver dataspace to the client.
? <service-object <daemon http-client> ?cap> [
$cap <resolve <http-client { response-content-type-override: "" }> $client-resolver>
]
<require-service <daemon http-client>>
<daemon http-client {
argv: [ "/bin/http-client" ]
clearEnv: #t
env: {
BUILD_SUM: $sum
}
protocol: application/syndicate
}>
```
---
## mintsturdyref
A utility for minting [Sturdyrefs](https://synit.org/book/operation/builtin/gatekeeper.html#sturdyrefs).
@@ -384,118 +343,34 @@ Sample Syndicate server script:
## msg
A utility that parses its command-line arguments as Preserves and sends them as messages to `$SYNDICATE_ROUTE`.
When called as `assert` (by a symlink or a rename) it will make assertions instead.
A utility that sends messages to `$SYNDICATE_ROUTE`.
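A minimal sketch of both invocations; the route value is illustrative and depends on your local gatekeeper:
```sh
# $SYNDICATE_ROUTE holds a Preserves route to a dataspace (value illustrative).
export SYNDICATE_ROUTE='<route [<unix "/run/user/1000/dataspace">]>'
msg '<beep>'                        # parsed as Preserves, sent as a message
ln -s "$(command -v msg)" ./assert  # call the same binary as `assert`...
./assert '<beep>'                   # ...and it makes an assertion instead
```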
## PostgreSQL
Readonly access to PostgreSQL databases.
Asserts rows as records in response to SQL query assertions.
Dynamic updates are not implemented.
## net_mapper
Publishes ICMP packet round-trip times. See [net_mapper.prs](./net_mapper.prs) for a protocol description. [Source](./src/net_mapper.nim).
Example script:
```
let ?postgreStep = <postgre {connection: [["host" "db.example.com"] ["dbname" "example"] ["user" "hackme"]]}>
let ?tuplespace = dataspace
$tuplespace ? ?row [
$log ! <log "-" { line: $row }>
]
let ?resolver = dataspace
$resolver ? <accepted ?sqlspace> [
$sqlspace ? <sql-error ?msg ?context> [
$log ! <log "-" { line: $msg context: $context }>
? <machine-dataspace ?machine> [
$machine ? <rtt "10.0.33.136" ?min ?avg ?max> [
$log ! <log "-" { ping: { min: $min avg: $avg max: $max } }>
]
$config [
<require-service <daemon net_mapper>>
<daemon net_mapper {
argv: ["/bin/net_mapper"]
protocol: application/syndicate
}>
? <service-object <daemon net_mapper> ?cap> [
$cap { dataspace: $machine }
]
]
$sqlspace <query [SELECT firstname FROM users] $tuplespace>
]
<require-service <daemon postgre-actor>>
$config ? <service-object <daemon postgre-actor> ?cap> [
$cap <resolve $postgreStep $resolver>
]
<daemon postgre-actor {
argv: [ "/bin/postgre-actor" ]
clearEnv: #t
protocol: application/syndicate
}>
```
## preserve_process_environment
This utility serializes its process environment to Preserves and prints it to stdout.
It can be used to feed the environment variables of a nested child of the Syndicate server back to the server, such as retrieving the variables that a desktop manager passed on to its children.
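A sketch of that pattern (paths hypothetical):
```sh
# Serialize this process's environment variables to a Preserves text file.
preserve_process_environment > /run/user/1000/session-environment.pr
```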
## SQLite
Readonly access to SQLite databases.
Asserts rows as records in response to SQL query assertions.
Dynamic updates are not implemented.
```
# Configuration example
let ?sqliteStep = <sqlite { database: "/var/db/stuff.db" }>
let ?tuplespace = dataspace
$tuplespace ? ?row [
$log ! <log "-" { line: $row }>
]
let ?resolver = dataspace
$resolver [
? <rejected ?detail> [
$log ! <log "-" { line: $detail }>
]
? <accepted ?sqlspace> [
$log ! <log "-" { sqlspace: $sqlspace }>
$sqlspace ? <sql-error ?msg ?context> [
$log ! <log "-" { line: $msg context: $context }>
]
$sqlspace <query [ SELECT local_display_name FROM contacts ] $tuplespace>
]
]
<require-service <daemon sqlite-actor>>
$config ? <service-object <daemon sqlite-actor> ?cap> [
$cap <resolve $sqliteStep $resolver>
]
<daemon sqlite-actor {
argv: [ "/bin/sqlite-actor" ]
clearEnv: #t
protocol: application/syndicate
}>
```
## syndump
Utility for printing assertions and messages. Parses the command-line arguments as a pattern, connects to a dataspace via `$SYNDICATE_ROUTE`, and writes observations to standard output. Published assertions are prefixed by the `+` character, retractions by `-`, and messages by `!`.
Example
```sh
# Print patterns in use, filter down with AWK to only the published patterns.
$ FS=':' syndump '<Observe ? _>' | awk -F : '/^\+/ { print $2 }'
```
## XSLT processor
Perform XML stylesheet transformations. For a given textual XSLT stylesheet and a textual XML document generate an abstract XML document in Preserves form. Inputs may be XML text or paths to XML files.
```
# Configuration example
let ?ds = dataspace
$ds [
? <xslt-transform "/stylesheet.xls" "/doc.xml" ?output> [
? <xml-translation ?text $output> [
$log ! <log "-" { xslt-output: $text }>
]
]
]
<require-service <daemon xslt_actor>>
? <service-object <daemon xslt_actor> ?cap> $cap [
<xml-translator { dataspace: $ds }>
<xslt { dataspace: $ds }>
]
```


@@ -1,8 +1,2 @@
include_rules
: sbom.json |> !sbom-to-nix |> | ./<lock>
run ./Tuprules.jq sbom.json
: foreach {bin} |> !assert_built |>
: $(BIN_DIR)/msg |> !symlink |> $(BIN_DIR)/beep
: $(BIN_DIR)/msg |> !symlink |> $(BIN_DIR)/assert
: lock.json |> !nim_cfg |> | ./<lock>


@@ -1,12 +0,0 @@
#! /usr/bin/env -S jq --raw-output --from-file
.metadata.component.properties as $props |
$props |
( map( select(.name | .[0:10] == "nim:binDir") ) +
map( select(.name | .[0:10] == "nim:srcDir") ) |
map( .value )
) + ["."] | .[0] as $binDir |
$props |
map( select(.name | .[0:8] == "nim:bin:") ) |
map( ": \($binDir)/\(.value).nim |> !nim_bin |> $(BIN_DIR)/\(.name[8:]) {bin}" ) |
join("\n")


@@ -1,7 +1,4 @@
include ../syndicate-nim/depends.tup
PROJECT_DIR = $(TUP_CWD)
NIM = $(DIRENV) $(NIM)
NIM_GROUPS += $(SYNDICATE_PROTOCOL)
NIM_GROUPS += $(PROJECT_DIR)/<lock>
NIM_GROUPS += $(PROJECT_DIR)/<schema>
NIM_FLAGS += --path:$(TUP_CWD)/../syndicate-nim/src
NIM_GROUPS += $(TUP_CWD)/<lock>


@@ -1,4 +0,0 @@
version 1 .
Base64Text = <base64 @txt string @bin bytes> .
Base64File = <base64-file @txt string @path string @size int> .


@@ -1,192 +0,0 @@
{
lib,
stdenv,
fetchgit,
fetchzip,
runCommand,
xorg,
nim,
nimOverrides,
}:
let
fetchers = {
fetchzip =
{ url, sha256, ... }:
fetchzip {
name = "source";
inherit url sha256;
};
fetchgit =
{
fetchSubmodules ? false,
leaveDotGit ? false,
rev,
sha256,
url,
...
}:
fetchgit {
inherit
fetchSubmodules
leaveDotGit
rev
sha256
url
;
};
};
filterPropertiesToAttrs =
prefix: properties:
lib.pipe properties [
(builtins.filter ({ name, ... }: (lib.strings.hasPrefix prefix name)))
(map (
{ name, value }:
{
name = lib.strings.removePrefix prefix name;
inherit value;
}
))
builtins.listToAttrs
];
buildNimCfg =
{ backend, components, ... }:
let
componentSrcDirs = map (
{ properties, ... }:
let
fodProps = filterPropertiesToAttrs "nix:fod:" properties;
fod = fetchers.${fodProps.method} fodProps;
srcDir = fodProps.srcDir or "";
in
if srcDir == "" then fod else "${fod}/${srcDir}"
) components;
in
runCommand "nim.cfg"
{
outputs = [
"out"
"src"
];
nativeBuildInputs = [ xorg.lndir ];
}
''
pkgDir=$src/pkg
cat << EOF >> $out
backend:${backend}
path:"$src"
path:"$pkgDir"
EOF
mkdir -p "$pkgDir"
${lib.strings.concatMapStrings (d: ''
lndir "${d}" "$pkgDir"
'') componentSrcDirs}
'';
buildCommands = lib.attrsets.mapAttrsToList (
output: input: ''
nim compile $nimFlags --out:${output} ${input}
''
);
installCommands = lib.attrsets.mapAttrsToList (
output: input: ''
install -Dt $out/bin ${output}
''
);
applySbom =
sbom:
{
passthru ? { },
...
}@prevAttrs:
let
properties = # SBOM metadata.component.properties as an attrset.
lib.attrsets.recursiveUpdate (builtins.listToAttrs sbom.metadata.component.properties)
passthru.properties or { };
nimBin = # A mapping of Nim module file paths to names of programs.
lib.attrsets.recursiveUpdate (lib.pipe properties [
(lib.attrsets.filterAttrs (name: value: lib.strings.hasPrefix "nim:bin:" name))
(lib.attrsets.mapAttrs' (
name: value: {
name = lib.strings.removePrefix "nim:bin:" name;
value = "${properties."nim:binDir" or (properties."nim:srcDir" or ".")}/${value}";
}
))
]) passthru.nimBin or { };
in
{
strictDeps = true;
pname = prevAttrs.pname or sbom.metadata.component.name;
version = prevAttrs.version or sbom.metadata.component.version or null;
configurePhase =
prevAttrs.configurePhase or ''
runHook preConfigure
echo "nim.cfg << $nimCfg"
cat $nimCfg >> nim.cfg
cat << EOF >> nim.cfg
nimcache:"$NIX_BUILD_TOP/nimcache"
parallelBuild:$NIX_BUILD_CORES
EOF
runHook postConfigure
'';
buildPhase =
prevAttrs.buildPhase or ''
runHook preBuild
${lib.strings.concatLines (buildCommands nimBin)}
runHook postBuild
'';
installPhase =
prevAttrs.installPhase or ''
runHook preInstall
${lib.strings.concatLines (installCommands nimBin)}
runHook postInstall
'';
nativeBuildInputs = (prevAttrs.nativeBuildInputs or [ ]) ++ [ nim ];
nimCfg =
prevAttrs.nimCfg or (buildNimCfg {
backend = prevAttrs.nimBackend or properties."nim:backend" or "c";
inherit (sbom) components;
});
passthru = {
inherit sbom properties nimBin;
};
};
applyOverrides =
prevAttrs:
builtins.foldl' (
prevAttrs:
{ name, ... }@component:
if (builtins.hasAttr name nimOverrides) then
prevAttrs // (nimOverrides.${name} component prevAttrs)
else
prevAttrs
) prevAttrs prevAttrs.passthru.sbom.components;
compose =
callerArg: sbom: finalAttrs:
let
callerAttrs = if builtins.isAttrs callerArg then callerArg else callerArg finalAttrs;
sbomAttrs = callerAttrs // (applySbom sbom callerAttrs);
overrideAttrs = sbomAttrs // (applyOverrides sbomAttrs);
in
overrideAttrs;
in
callerArg: sbomArg:
let
sbom = if builtins.isAttrs sbomArg then sbomArg else builtins.fromJSON (builtins.readFile sbomArg);
overrideSbom = f: stdenv.mkDerivation (compose callerArg (sbom // (f sbom)));
in
(stdenv.mkDerivation (compose callerArg sbom)) // { inherit overrideSbom; }

View File

@@ -1,21 +1,11 @@
version 1 .
embeddedType EntityRef.Cap .
Base64DecoderArguments = <base64-decoder {
dataspace: #:any
}>.
CacheArguments = <cache {
dataspace: #:any
lifetime: float
}>.
FileSystemStep = <file-system @detail FileSystemDetail> .
FileSystemDetail = {
# iounit: int
root: string
} .
FileSystemUsageArguments = <file-system-usage {
dataspace: #:any
}>.
@@ -25,56 +15,36 @@ JsonTranslatorArguments = <json-stdio-translator {
dataspace: #:any
}>.
TcpAddress = <tcp @host string @port int>.
UnixAddress = <unix @path string>.
JsonTranslatorConnected = <connected @path string>.
SocketAddress = TcpAddress / UnixAddress .
HttpClientStep = <http-client @detail HttpClientStepDetail>.
HttpClientStepDetail = {
# Body parsing happens according to a heuristic interpretation
# of Content-Type headers.
# Set this field as "application/octet-stream" to never parse
# response bodies or to "application/json" to parse all response
# bodies as JSON.
response-content-type-override: string
} .
HttpDriverStep = <http-driver { }>.
JsonSocketTranslatorStep = <json-socket-translator {
socket: SocketAddress
JsonSocketTranslatorArguments = <json-socket-translator {
dataspace: #:any
socket: string
}>.
PostgreStep = <postgre {
PostgreArguments = <postgre {
connection: [PostgreConnectionParameter ...]
dataspace: #:any
}>.
PostgreConnectionParameter = [@key string @val string].
PrinterStep = <printer {}> .
PulseStep = <pulse @detail PulseDetail> .
PulseDetail = {
# Destination for assertions.
target: #:any
# Interval in seconds at which assertions are forwarded.
interval: float
# Period in seconds of assertion.
period: float
# Dither the @interval with a Gaussian deviation of @dither.
dither: float
} .
PulseArguments = <pulse {
dataspace: #:any
}>.
SqliteStep = <sqlite {
SqliteArguments = <sqlite {
database: string
dataspace: #:any
}>.
WebhooksArguments = <webhooks {
endpoints: {[string ...]: #:any ...:...}
listen: Tcp
}>.
WebsocketArguments = <websocket {
dataspace: #:any
url: string
}>.
XmlTranslatorArguments = <xml-translator {


@@ -1,17 +1,10 @@
{
pkgs ? import <nixpkgs> { },
}:
with pkgs;
let
buildNimSbom = pkgs.callPackage ./build-nim-sbom.nix { };
in
buildNimSbom (finalAttrs: {
src = if lib.inNixShell then null else lib.cleanSource ./.;
buildInputs = [
postgresql.out
sqlite
libxml2
libxslt
openssl
];
}) ./sbom.json
{ pkgs ? import <nixpkgs> { } }:
pkgs.buildNimPackage {
name = "syndicate_utils";
propagatedNativeBuildInputs = [ pkgs.pkg-config ];
propagatedBuildInputs =
[ pkgs.postgresql pkgs.sqlite pkgs.libxml2 pkgs.libxslt ];
lockFile = ./lock.json;
src = pkgs.lib.sources.cleanSource ./.;
}


@@ -1,4 +0,0 @@
version 1 .
embeddedType EntityRef.Cap .
Read = <read @path string @offset int @count int @sink #:bytes> .

lock.json

@@ -1,28 +1,12 @@
{
"depends": [
{
"date": "2024-05-23T17:44:14+03:00",
"deepClone": false,
"fetchLFS": false,
"fetchSubmodules": true,
"hash": "sha256-qTRhHsOPNov1BQcm3P7NEkEPW6uh80XFfQRBdMp4o0Q=",
"leaveDotGit": false,
"method": "git",
"packages": [
"syndicate"
],
"path": "/nix/store/1lcxrap5n80hy1z4bcmsmdx83n4b9wjf-syndicate-nim",
"rev": "7ab4611824b676157523f2618e7893d5ac99e4f2",
"sha256": "0i53g3578h84gp2lbwx1mddhyh8jrpzdq9h70psqndlgqcg62d59",
"srcDir": "src",
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim.git"
},
{
"method": "fetchzip",
"packages": [
"bigints"
],
"path": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source",
"ref": "20231006",
"rev": "86ea14d31eea9275e1408ca34e6bfe9c99989a96",
"sha256": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4",
"srcDir": "src",
@@ -31,107 +15,85 @@
{
"method": "fetchzip",
"packages": [
"cps"
"hashlib"
],
"path": "/nix/store/8gbhwni0akqskdb3qhn5nfgv6gkdz0vz-source",
"rev": "c90530ac57f98a842b7be969115c6ef08bdcc564",
"sha256": "0h8ghs2fqg68j3jdcg7grnxssmllmgg99kym2w0a3vlwca1zvr62",
"path": "/nix/store/fav82xdbicvlk34nmcbl89zx99lr3mbs-source",
"rev": "f9455d4be988e14e3dc7933eb7cc7d7c4820b7ac",
"sha256": "1sx6j952lj98629qfgr7ds5aipyw9d6lldcnnqs205wpj4pkcjb3",
"srcDir": "",
"url": "https://github.com/ehmry/cps/archive/c90530ac57f98a842b7be969115c6ef08bdcc564.tar.gz"
"url": "https://github.com/ehmry/hashlib/archive/f9455d4be988e14e3dc7933eb7cc7d7c4820b7ac.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"getdns"
"illwill"
],
"path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
"rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
"sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
"path": "/nix/store/3lmm3z36qn4gz7bfa209zv0pqrpm3di9-source",
"ref": "v0.3.2",
"rev": "1d12cb36ab7b76c31d2d25fa421013ecb382e625",
"sha256": "0f9yncl5gbdja18mrqf5ixrdgrh95k0khda923dm1jd1x1b7ar8z",
"srcDir": "",
"url": "https://github.com/johnnovak/illwill/archive/1d12cb36ab7b76c31d2d25fa421013ecb382e625.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"nimcrypto"
],
"path": "/nix/store/fkrcpp8lzj2yi21na79xm63xk0ggnqsp-source",
"rev": "485f7b3cfa83c1beecc0e31be0e964d697aa74d7",
"sha256": "1h3dzdbc9kacwpi10mj73yjglvn7kbizj1x8qc9099ax091cj5xn",
"path": "/nix/store/zyr8zwh7vaiycn1s4r8cxwc71f2k5l0h-source",
"ref": "traditional-api",
"rev": "602c5d20c69c76137201b5d41f788f72afb95aa8",
"sha256": "1dmdmgb6b9m5f8dyxk781nnd61dsk3hdxqks7idk9ncnpj9fng65",
"srcDir": "",
"url": "https://github.com/cheatfate/nimcrypto/archive/485f7b3cfa83c1beecc0e31be0e964d697aa74d7.tar.gz"
"url": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"npeg"
],
"path": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source",
"rev": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d",
"sha256": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg",
"path": "/nix/store/ffkxmjmigfs7zhhiiqm0iw2c34smyciy-source",
"ref": "1.2.1",
"rev": "26d62fdc40feb84c6533956dc11d5ee9ea9b6c09",
"sha256": "0xpzifjkfp49w76qmaylan8q181bs45anmp46l4bwr3lkrr7bpwh",
"srcDir": "src",
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
"url": "https://github.com/zevv/npeg/archive/26d62fdc40feb84c6533956dc11d5ee9ea9b6c09.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"preserves"
],
"path": "/nix/store/9zl4s2did00725n8ygbp37agvkskdhcx-source",
"rev": "1fee87590940761e288cf9ab3c7270832403b719",
"sha256": "1ny42rwr3yx52zwvkdg4lh54nxaxrmxdj9dlw3qarvvp2grfq4j2",
"path": "/nix/store/6nnn5di5vip1vladlb7z56rbw18d1y7j-source",
"ref": "20240208",
"rev": "2825bceecf33a15b9b7942db5331a32cbc39b281",
"sha256": "145vf46fy3wc52j6vs509fm9bi5lx7c53gskbkpcfbkv82l86dgk",
"srcDir": "src",
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/1fee87590940761e288cf9ab3c7270832403b719.tar.gz"
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/2825bceecf33a15b9b7942db5331a32cbc39b281.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"stew"
"syndicate"
],
"path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
"rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
"srcDir": "",
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
"path": "/nix/store/y9f3j4m7vmhf8gbpkvqa77jvzrc5ynlm-source",
"ref": "20240208",
"rev": "50a77995bcfe15e6062f54c6af0f55fba850c329",
"sha256": "1avrk86c34qg39w8vlixsksli2gwgbsf29jhlap27ffzdbj2zbal",
"srcDir": "src",
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/50a77995bcfe15e6062f54c6af0f55fba850c329.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"sys"
"ws"
],
"path": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source",
"rev": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
"sha256": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q",
"path": "/nix/store/zd51j4dphs6h1hyhdbzdv840c8813ai8-source",
"ref": "0.5.0",
"rev": "9536bf99ddf5948db221ccb7bb3663aa238a8e21",
"sha256": "0j8z9jlvzb1h60v7rryvh2wx6vg99lra6i62whf3fknc53l641fz",
"srcDir": "src",
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"taps"
],
"path": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source",
"rev": "8c8572cd971d1283e6621006b310993c632da247",
"sha256": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
},
{
"date": "2024-05-22T06:09:38+02:00",
"deepClone": false,
"fetchLFS": false,
"fetchSubmodules": true,
"hash": "sha256-B3fMwgBpO2Ty8143k9V1cnHXa5K8i1+zN+eF/rBLMe0=",
"leaveDotGit": false,
"method": "git",
"packages": [
"solo5_dispatcher"
],
"path": "/nix/store/xqj48v4rqlffl1l94hi02szazj5gla8g-solo5_dispatcher",
"rev": "cc64ef99416b22b12e4a076d33de9e25a163e57d",
"sha256": "1v9i9fqgx1g76yrmz2xwj9mxfwbjfpar6dsyygr68fv9031cqxq7",
"srcDir": "pkg",
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher"
"url": "https://github.com/treeform/ws/archive/9536bf99ddf5948db221ccb7bb3663aa238a8e21.tar.gz"
}
]
}

sbom.json

@@ -1,653 +0,0 @@
{
"bomFormat": "CycloneDX",
"specVersion": "1.6",
"metadata": {
"component": {
"type": "application",
"bom-ref": "pkg:nim/syndicate_utils",
"name": "syndicate_utils",
"description": "Utilites for Syndicated Actors and Synit",
"version": "20240627",
"authors": [
{
"name": "Emery Hemingway"
}
],
"licenses": [
{
"license": {
"id": "Unlicense"
}
}
],
"properties": [
{
"name": "nim:skipExt",
"value": "nim"
},
{
"name": "nim:bin:postgre-actor",
"value": "postgre_actor"
},
{
"name": "nim:bin:xslt-actor",
"value": "xslt_actor"
},
{
"name": "nim:bin:preserve-process-environment",
"value": "preserve_process_environment"
},
{
"name": "nim:bin:mintsturdyref",
"value": "mintsturdyref"
},
{
"name": "nim:bin:esc-printer-driver",
"value": "esc_printer_driver"
},
{
"name": "nim:bin:msg",
"value": "msg"
},
{
"name": "nim:bin:rofi-script-actor",
"value": "rofi_script_actor"
},
{
"name": "nim:bin:syndesizer",
"value": "syndesizer"
},
{
"name": "nim:bin:http-client",
"value": "http_client"
},
{
"name": "nim:bin:mount-actor",
"value": "mount_actor"
},
{
"name": "nim:bin:syndump",
"value": "syndump"
},
{
"name": "nim:bin:sqlite-actor",
"value": "sqlite_actor"
},
{
"name": "nim:srcDir",
"value": "src"
},
{
"name": "nim:backend",
"value": "c"
}
]
}
},
"components": [
{
"type": "library",
"bom-ref": "pkg:nim/syndicate",
"name": "syndicate",
"version": "trunk",
"externalReferences": [
{
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/c21fdb5003417c99b8bb599df03fd7914cba7466.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/lw30rzfxk35nzkkp4d53s9nr6xalkg8s-source"
},
{
"name": "nix:fod:rev",
"value": "c21fdb5003417c99b8bb599df03fd7914cba7466"
},
{
"name": "nix:fod:sha256",
"value": "0f14w83hpjym23f12brrirqwlib9b7m52m0g63fzmrcl6ig9y915"
},
{
"name": "nix:fod:url",
"value": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/c21fdb5003417c99b8bb599df03fd7914cba7466.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "trunk"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/preserves",
"name": "preserves",
"version": "20240610",
"externalReferences": [
{
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/560a6417a30a2dff63f24b62498e9fcac2de8354.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/0sszsmz84ppwqsgda8cmli4lfh2mjmin-source"
},
{
"name": "nix:fod:rev",
"value": "560a6417a30a2dff63f24b62498e9fcac2de8354"
},
{
"name": "nix:fod:sha256",
"value": "19r983fy7m54mlaj0adxdp8pxi1x8dp6phkcnr8rz5y5cwndfjx2"
},
{
"name": "nix:fod:url",
"value": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/560a6417a30a2dff63f24b62498e9fcac2de8354.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20240610"
},
{
"name": "nix:fod:srcDir",
"value": "src"
},
{
"name": "nix:fod:date",
"value": "2024-05-23T15:58:40+03:00"
},
{
"name": "nix:fod:hash",
"value": "sha256-JvdvLdPajDgIPbLblO0LbOm0wEp530fs8LYmgH885sk="
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/sys",
"name": "sys",
"version": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
"externalReferences": [
{
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/ehmry/nim-sys.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source"
},
{
"name": "nix:fod:rev",
"value": "4ef3b624db86e331ba334e705c1aa235d55b05e1"
},
{
"name": "nix:fod:sha256",
"value": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q"
},
{
"name": "nix:fod:url",
"value": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/taps",
"name": "taps",
"version": "20240405",
"externalReferences": [
{
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.sr.ht/~ehmry/nim_taps",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source"
},
{
"name": "nix:fod:rev",
"value": "8c8572cd971d1283e6621006b310993c632da247"
},
{
"name": "nix:fod:sha256",
"value": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8"
},
{
"name": "nix:fod:url",
"value": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20240405"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/nimcrypto",
"name": "nimcrypto",
"version": "traditional-api",
"externalReferences": [
{
"url": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/cheatfate/nimcrypto",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/zyr8zwh7vaiycn1s4r8cxwc71f2k5l0h-source"
},
{
"name": "nix:fod:rev",
"value": "602c5d20c69c76137201b5d41f788f72afb95aa8"
},
{
"name": "nix:fod:sha256",
"value": "1dmdmgb6b9m5f8dyxk781nnd61dsk3hdxqks7idk9ncnpj9fng65"
},
{
"name": "nix:fod:url",
"value": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "traditional-api"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/npeg",
"name": "npeg",
"version": "1.2.2",
"externalReferences": [
{
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/zevv/npeg.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source"
},
{
"name": "nix:fod:rev",
"value": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d"
},
{
"name": "nix:fod:sha256",
"value": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg"
},
{
"name": "nix:fod:url",
"value": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "1.2.2"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/bigints",
"name": "bigints",
"version": "20231006",
"externalReferences": [
{
"url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/ehmry/nim-bigints.git",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source"
},
{
"name": "nix:fod:rev",
"value": "86ea14d31eea9275e1408ca34e6bfe9c99989a96"
},
{
"name": "nix:fod:sha256",
"value": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4"
},
{
"name": "nix:fod:url",
"value": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20231006"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/cps",
"name": "cps",
"version": "0.10.4",
"externalReferences": [
{
"url": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/nim-works/cps",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/m9vpcf3dq6z2h1xpi1vlw0ycxp91s5p7-source"
},
{
"name": "nix:fod:rev",
"value": "2a4d771a715ba45cfba3a82fa625ae7ad6591c8b"
},
{
"name": "nix:fod:sha256",
"value": "0c62k5wpq9z9mn8cd4rm8jjc4z0xmnak4piyj5dsfbyj6sbdw2bf"
},
{
"name": "nix:fod:url",
"value": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "0.10.4"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/stew",
"name": "stew",
"version": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"externalReferences": [
{
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz",
"type": "source-distribution"
},
{
"url": "https://github.com/status-im/nim-stew",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source"
},
{
"name": "nix:fod:rev",
"value": "3c91b8694e15137a81ec7db37c6c58194ec94a6a"
},
{
"name": "nix:fod:sha256",
"value": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w"
},
{
"name": "nix:fod:url",
"value": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/getdns",
"name": "getdns",
"version": "trunk",
"externalReferences": [
{
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/e925d2f6d2bf31384969568e97917af8ef77b7a2.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.sr.ht/~ehmry/getdns-nim",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/hq145zgfjldsj7fh026ikfwkhs2cz9nv-source"
},
{
"name": "nix:fod:rev",
"value": "e925d2f6d2bf31384969568e97917af8ef77b7a2"
},
{
"name": "nix:fod:sha256",
"value": "0gflawpkwk8nghwvs69yb5mj3s6fzrmybys5466m2650xr26hs4p"
},
{
"name": "nix:fod:url",
"value": "https://git.sr.ht/~ehmry/getdns-nim/archive/e925d2f6d2bf31384969568e97917af8ef77b7a2.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "trunk"
},
{
"name": "nix:fod:srcDir",
"value": "src"
}
]
},
{
"type": "library",
"bom-ref": "pkg:nim/solo5_dispatcher",
"name": "solo5_dispatcher",
"version": "20240522",
"externalReferences": [
{
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz",
"type": "source-distribution"
},
{
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher",
"type": "vcs"
}
],
"properties": [
{
"name": "nix:fod:method",
"value": "fetchzip"
},
{
"name": "nix:fod:path",
"value": "/nix/store/4jj467pg4hs6warhksb8nsxn9ykz8c7c-source"
},
{
"name": "nix:fod:rev",
"value": "cc64ef99416b22b12e4a076d33de9e25a163e57d"
},
{
"name": "nix:fod:sha256",
"value": "1v9i9fqgx1g76yrmz2xwj9mxfwbjfpar6dsyygr68fv9031cqxq7"
},
{
"name": "nix:fod:url",
"value": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz"
},
{
"name": "nix:fod:ref",
"value": "20240522"
},
{
"name": "nix:fod:srcDir",
"value": "pkg"
}
]
}
],
"dependencies": [
{
"ref": "pkg:nim/syndicate_utils",
"dependsOn": [
"pkg:nim/syndicate"
]
},
{
"ref": "pkg:nim/syndicate",
"dependsOn": [
"pkg:nim/nimcrypto",
"pkg:nim/preserves",
"pkg:nim/sys",
"pkg:nim/taps"
]
},
{
"ref": "pkg:nim/preserves",
"dependsOn": [
"pkg:nim/npeg",
"pkg:nim/bigints"
]
},
{
"ref": "pkg:nim/sys",
"dependsOn": [
"pkg:nim/cps",
"pkg:nim/stew"
]
},
{
"ref": "pkg:nim/taps",
"dependsOn": [
"pkg:nim/getdns",
"pkg:nim/sys",
"pkg:nim/cps",
"pkg:nim/solo5_dispatcher"
]
},
{
"ref": "pkg:nim/nimcrypto",
"dependsOn": []
},
{
"ref": "pkg:nim/npeg",
"dependsOn": []
},
{
"ref": "pkg:nim/bigints",
"dependsOn": []
},
{
"ref": "pkg:nim/cps",
"dependsOn": []
},
{
"ref": "pkg:nim/stew",
"dependsOn": []
},
{
"ref": "pkg:nim/getdns",
"dependsOn": []
},
{
"ref": "pkg:nim/solo5_dispatcher",
"dependsOn": [
"pkg:nim/cps"
]
}
]
}


@@ -2,7 +2,4 @@ version 1 .
# When asserted the actor responds to @target rows as records
# of the given label and row columns as record fields.
Query = <query @statement [any ...] @target #:any> .
# When a query fails this is asserted instead.
SqlError = <sql-error @msg string @context string>.
Query = <query @statement string @target #:any> .

src/Tupfile (new file)

@@ -0,0 +1,4 @@
include_rules
: foreach *.nim | $(SYNDICATE_PROTOCOL) ./<schema> ./syndesizer/<checks> |> !nim_bin |> {bin}
: foreach {bin} |> !assert_built |>
: $(BIN_DIR)/msg |> cp %f %o |> $(BIN_DIR)/beep

src/drivers/Tupfile (new file)

@@ -0,0 +1,4 @@
include_rules
NIM_FLAGS += --path:$(TUP_CWD)/../../../taps/pkg
: foreach *.nim | $(SYNDICATE_PROTOCOL) ../<schema> |> !nim_bin |> | {bin}
: foreach {bin} |> !assert_built |>

src/drivers/http_driver.nim (new file)

@@ -0,0 +1,339 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[httpcore, options, parseutils, sets, streams, strutils, tables, times, uri]
import pkg/sys/ioqueue
import pkg/preserves
import pkg/syndicate
import pkg/syndicate/protocols/http
import taps
import ../schema/config
const
SP = { ' ', '\x09', '\x0b', '\x0c', '\x0d' }
SupportedVersion = "HTTP/1.1"
IMF = initTimeFormat"ddd, dd MMM yyyy HH:mm:ss"
proc echo(args: varargs[string, `$`]) =
stderr.writeLine(args)
proc `$`(b: seq[byte]): string = cast[string](b)
# a Date header on responses must be present if a clock is available
# An upgrade header can be used to switch over to native syndicate protocol.
# Check the response encoding matches or otherwise return 415
type HandlerEntity = ref object of Entity
handler: proc (turn: var Turn; req: HttpRequest; cap: Cap)
method publish(e: HandlerEntity; turn: var Turn; a: AssertionRef; h: Handle) =
var ctx = a.value.preservesTo HttpContext
if ctx.isSome:
var res = ctx.get.res.unembed Cap
if res.isSome:
e.handler(turn, ctx.get.req, res.get)
else:
echo "HandlerEntity got a non-Cap ", ctx.get.res
else:
echo "HandlerEntity got a non-HttpContext ", a.value
proc respond404(turn: var Turn; req: HttpRequest; cap: Cap) =
message(turn, cap, HttpResponse(
orKind: HttpResponseKind.status,
status: HttpResponseStatus(
code: 404,
message: "resource not found",
)))
message(turn, cap, HttpResponse(
orKind: HttpResponseKind.header,
header: HttpResponseHeader(
name: Symbol"content-length",
value: "0",
)))
message(turn, cap, HttpResponse(orKind: HttpResponseKind.done))
proc bind404Handler(turn: var Turn; ds: Cap; port: Port) =
stderr.writeLine "bind 404 handler to ", port
var b: HttpBinding
b.host = HostPattern(orKind: HostPatternKind.any)
b.port = BiggestInt port
b.`method` = MethodPattern(orKind: MethodPatternKind.any)
b.path = @[PathPatternElement(orKind: PathPatternElementKind.rest)]
b.handler = newCap(turn, HandlerEntity(handler: respond404)).toPreserves
discard publish(turn, ds, b)
proc badRequest(conn: Connection; msg: string) =
conn.send(SupportedVersion & " 400 " & msg, endOfMessage = true)
close(conn)
proc extractQuery(s: var string): Table[Symbol, seq[QueryValue]] =
let start = succ skipUntil(s, '?')
if start < s.len:
var query = s[start..s.high]
s.setLen(start)
for key, val in uri.decodeQuery(query):
var list = result.getOrDefault(Symbol key)
list.add QueryValue(orKind: QueryValueKind.string, string: val)
result[Symbol key] = list
proc parseRequest(conn: Connection; text: string): (int, HttpRequest) =
## Parse an `HttpRequest` request out of a `text` from a `Connection`.
var
token: string
off: int
template advanceSp =
let n = skipWhile(text, SP, off)
if n < 1:
badRequest(conn, "invalid request")
return
inc(off, n)
# method
off.inc parseUntil(text, token, SP, off)
result[1].`method` = token.toLowerAscii.Symbol
advanceSp()
# target
if text[off] == '/': inc(off) #TODO: always a leading slash?
off.inc parseUntil(text, token, SP, off)
advanceSp()
block:
var version: string
off.inc parseUntil(text, version, SP, off)
advanceSp()
if version != SupportedVersion:
badRequest(conn, "version not supported")
return
result[1].query = extractQuery(token)
result[1].path = split(token, '/')
for p in result[1].path.mitems:
# normalize the path
for i, c in p:
if c in {'A'..'Z'}:
p[i] = char c.ord + 0x20
template advanceLine =
inc off, skipWhile(text, {'\x0d'}, off)
if text.high < off or text[off] != '\x0a':
badRequest(conn, "invalid request")
return
inc off, 1
advanceLine()
while off < text.len:
off.inc parseUntil(text, token, {'\x0d', '\x0a'}, off)
if token == "": break
advanceLine()
var
(key, vals) = httpcore.parseHeader(token)
k = key.toLowerAscii.Symbol
v = result[1].headers.getOrDefault(k)
for e in vals.mitems:
e = e.toLowerAscii
if k == Symbol"host":
result[1].host = e
if v == "": v = move e
else:
v.add ", "
v.add e
if k == Symbol"host":
result[1].host = v
result[1].headers[k] = v
result[0] = off
proc send(conn: Connection; chunk: Chunk) =
case chunk.orKind
of ChunkKind.string:
conn.send(chunk.string, endOfMessage = false)
of ChunkKind.bytes:
conn.send(chunk.bytes, endOfMessage = false)
type
Driver = ref object
facet: Facet
ds: Cap
bindings: seq[HttpBinding]
Session = ref object
facet: Facet
driver: Driver
conn: Connection
port: Port
Exchange = ref object of Entity
ses: Session
req: HttpRequest
stream: StringStream
mode: HttpResponseKind
proc match(b: HttpBinding, r: HttpRequest): bool =
## Check if `HttpBinding` `b` matches `HttpRequest` `r`.
result =
(b.host.orKind == HostPatternKind.any or
b.host.host == r.host) and
(b.port == r.port) and
(b.`method`.orKind == MethodPatternKind.any or
b.`method`.specific == r.`method`)
if result:
for i, p in b.path:
if i > r.path.high: return false
case p.orKind
of PathPatternElementKind.wildcard: discard
of PathPatternElementKind.label:
if p.label != r.path[i]: return false
of PathPatternElementKind.rest:
return i == b.path.high
# return false if ... isn't the last element
proc strongerThan(a, b: HttpBinding): bool =
## Check if `a` is a stronger `HttpBinding` than `b`.
result =
(a.host.orKind != b.host.orKind and
a.host.orKind == HostPatternKind.host) or
(a.method.orKind != b.method.orKind and
a.method.orKind == MethodPatternKind.specific)
if not result:
if a.path.len > b.path.len: return true
for i in a.path.low..b.path.high:
if a.path[i].orKind != b.path[i].orKind and
a.path[i].orKind == PathPatternElementKind.label:
return true
proc match(driver: Driver; req: HttpRequest): Option[HttpBinding] =
for b in driver.bindings:
if b.match req:
if result.isNone or b.strongerThan(result.get):
result = some b
else:
echo b, " does not match ", req
method message(e: Exchange; turn: var Turn; a: AssertionRef) =
# Send responses back into a connection.
var res: HttpResponse
if e.mode != HttpResponseKind.done and res.fromPreserves a.value:
case res.orKind
of HttpResponseKind.status:
if e.mode == res.orKind:
e.stream.writeLine(SupportedVersion, " ", res.status.code, " ", res.status.message)
e.stream.writeLine("date: ", now().format(IMF))
# add Date header automatically - RFC 9110 Section 6.6.1.
e.mode = HttpResponseKind.header
of HttpResponseKind.header:
if e.mode == res.orKind:
e.stream.writeLine(res.header.name, ": ", res.header.value)
of HttpResponseKind.chunk:
if e.mode == HttpResponseKind.header:
e.mode = res.orKind
e.stream.writeLine()
e.ses.conn.send(move e.stream.data, endOfMessage = false)
e.ses.conn.send(res.chunk.chunk)
of HttpResponseKind.done:
if e.mode == HttpResponseKind.header:
e.stream.writeLine()
e.ses.conn.send(move e.stream.data, endOfMessage = false)
e.mode = res.orKind
e.ses.conn.send(res.done.chunk)
stop(turn)
# stop the facet scoped to the exchange
# so that the response capability is withdrawn
proc service(turn: var Turn; exch: Exchange) =
## Service an HTTP message exchange.
var binding = exch.ses.driver.match exch.req
if binding.isNone:
echo "no binding for ", exch.req
stop(turn)
else:
echo "driver matched binding ", binding.get
var handler = binding.get.handler.unembed Cap
if handler.isNone:
stop(turn)
else:
publish(turn, handler.get, HttpContext(
req: exch.req,
res: embed newCap(turn, exch),
))
proc service(ses: Session) =
## Service a connection to an HTTP client.
ses.facet.onStop do (turn: var Turn):
close ses.conn
ses.conn.onClosed do ():
stop ses.facet
ses.conn.onReceivedPartial do (data: seq[byte]; ctx: MessageContext; eom: bool):
ses.facet.run do (turn: var Turn):
var (n, req) = parseRequest(ses.conn, cast[string](data))
if n > 0:
req.port = BiggestInt ses.port
inFacet(turn) do (turn: var Turn):
preventInertCheck(turn)
# start a new facet for this message exchange
turn.service Exchange(
facet: turn.facet,
ses: ses,
req: req,
stream: newStringStream(),
mode: HttpResponseKind.status
)
# ses.conn.receive()
ses.conn.receive()
proc newListener(port: Port): Listener =
var lp = newLocalEndpoint()
lp.with port
listen newPreconnection(local=[lp])
proc httpListen(turn: var Turn; driver: Driver; port: Port) =
let facet = turn.facet
var listener = newListener(port)
# TODO: let listener
listener.onListenError do (err: ref Exception):
terminateFacet(facet, err)
facet.onStop do (turn: var Turn):
stop listener
listener.onConnectionReceived do (conn: Connection):
driver.facet.run do (turn: var Turn):
# start a new turn
linkActor(turn, "http-conn") do (turn: var Turn):
preventInertCheck(turn)
# facet is scoped to the lifetime of the connection
service Session(
facet: turn.facet,
driver: driver,
conn: conn,
port: port,
)
proc httpDriver(turn: var Turn; ds: Cap) =
let driver = Driver(facet: turn.facet, ds: ds)
during(turn, ds, HttpBinding?:{
1: grab(),
}) do (port: BiggestInt):
publish(turn, ds, HttpListener(port: port))
during(turn, ds, ?:HttpBinding) do (
ho: HostPattern, po: int, me: MethodPattern, pa: PathPattern, e: Value):
let b = HttpBinding(host: ho, port: po, `method`: me, path: pa, handler: e)
driver.bindings.add b
do:
raiseAssert("need to remove binding " & $b)
during(turn, ds, ?:HttpListener) do (port: uint16):
bind404Handler(turn, ds, Port port)
httpListen(turn, driver, Port port)
proc spawnHttpDriver*(turn: var Turn; ds: Cap) =
during(turn, ds, ?:HttpDriverArguments) do (ds: Cap):
spawnActor("http-driver", turn) do (turn: var Turn):
httpDriver(turn, ds)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn, spawnHttpDriver)


@@ -1,111 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## ESC/P printer control actor.
import
std/[cmdline, oserrors, posix, sequtils, sets],
pkg/sys/[files, ioqueue],
preserves, preserves/sugar,
syndicate, syndicate/relays,
syndicate/protocols/[gatekeeper, sturdy],
./private/esc_p
from pkg/sys/handles import FD
proc echo(args: varargs[string, `$`]) {.used.} =
stderr.writeLine(args)
type
HandleSet = HashSet[Handle]
Printer = ref object of Entity
device: AsyncFile
boldHandles, italicHandles, superscriptHandles, subscriptHandles: HandleSet
buffer: seq[byte]
isBusy: bool
proc flush(printer: Printer) {.asyncio.} =
printer.isBusy = true
while printer.buffer.len > 0:
let n = printer.device.write(printer.buffer)
if n > 0:
printer.buffer.delete(0..<n)
elif n < 0:
osLastError().osErrorMsg().quit()
printer.isBusy = false
proc write(printer: Printer; s: string) {.inline.} =
printer.buffer.add cast[seq[byte]](s)
if not printer.isBusy:
discard trampoline:
whelp printer.flush()
proc writeLine(printer: Printer; s: string) {.inline.} =
printer.write(s)
printer.write("\r\n")
method message(printer: Printer; t: Turn; a: AssertionRef) =
if a.value.isString:
printer.write(a.value.string)
# TODO: unicode?
# TODO: line breaks?
proc assert(printer: Printer; handles: var HandleSet; ctrl: string; h: Handle) =
if handles.len == 0: printer.write(ctrl)
handles.incl h
proc retract(printer: Printer; handles: var HandleSet; ctrl: string; h: Handle) =
handles.excl h
if handles.len == 0: printer.write(ctrl)
method publish(printer: Printer; t: Turn; a: AssertionRef; h: Handle) =
if a.value.isRecord("bold"):
printer.assert(printer.boldHandles, SelectBoldFont, h)
elif a.value.isRecord("italic"):
printer.assert(printer.italicHandles, SelectItalicFont, h)
elif a.value.isRecord("superscript"):
printer.assert(printer.superscriptHandles, SelectSuperScript, h)
elif a.value.isRecord("subscript"):
printer.assert(printer.subscriptHandles, SelectSubScript, h)
method retract(printer: Printer; t: Turn; h: Handle) =
if printer.boldHandles.contains h:
printer.retract(printer.boldHandles, CancelBoldFont, h)
elif printer.italicHandles.contains h:
printer.retract(printer.italicHandles, CanceItalicFont, h)
elif printer.superscriptHandles.contains h:
printer.retract(printer.superscriptHandles, CancelAltScript, h)
elif printer.subscriptHandles.contains h:
printer.retract(printer.subscriptHandles, CancelAltScript, h)
proc devicePath: string =
if paramCount() < 1:
quit "missing path to printer device file"
if paramCount() > 1:
quit "too many command line parameters"
paramStr(1)
proc openPrinter(turn: Turn): Printer =
new result
result.facet = turn.facet
let fd = posix.open(devicePath(), O_WRONLY or O_NONBLOCK, 0)
if fd < 0: osLastError().osErrorMsg().quit()
result.device = newAsyncFile(FD fd)
result.write(InitializePrinter)
runActor(devicePath()) do (turn: Turn):
let printer = openPrinter(turn)
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
let pat = Resolve?:{0: matchRecord("printer"), 1: grab()}
during(turn, relay, pat) do (cont: Cap):
# Publish for any <printer> step.
discard publish(turn, cont, ResolvedAccepted(
responderSession: turn.newCap(printer)))


@@ -1,119 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
# TODO: write a TAPS HTTP client. Figure out how to externalise TLS.
import
std/[httpclient, options, streams, strutils, tables, uri],
pkg/taps,
pkg/preserves,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, http],
./schema/config
proc url(req: HttpRequest): Uri =
result.scheme = if req.port == 80: "http" else: "https"
result.hostname = req.host.present
result.port = $req.port
for i, p in req.path:
if 0 < i: result.path.add '/'
result.path.add p.encodeUrl
for key, vals in req.query:
if result.query.len > 0:
result.query.add '&'
result.query.add key.string.encodeUrl
for i, val in vals:
if i == 0: result.query.add '='
elif i < vals.high: result.query.add ','
result.query.add val.string.encodeUrl
proc toContent(body: Value; contentType: var string): string =
case contentType
of "application/json", "text/javascript":
var stream = newStringStream()
writeText(stream, body, textJson)
return stream.data.move
of "application/preserves":
return cast[string](body.encode)
of "text/preserves":
return $body
else:
discard
case body.kind
of pkString:
result = body.string
if contentType == "":
contentType = "text/plain"
of pkByteString:
result = cast[string](body.bytes)
if contentType == "":
contentType = "application/octet-stream"
else:
raise newException(ValueError, "unknown content type")
proc spawnHttpClient*(turn: Turn; relay: Cap): Actor {.discardable.} =
let pat = Resolve?:{ 0: HttpClientStep.grabWithin, 1: grab() }
result = spawnActor(turn, "http-client") do (turn: Turn):
during(turn, relay, pat) do (detail: HttpClientStepDetail, observer: Cap):
linkActor(turn, "session") do (turn: Turn):
let ds = turn.newDataspace()
discard publish(turn, observer, ResolvedAccepted(responderSession: ds))
during(turn, ds, HttpContext.grabType) do (ctx: HttpContext):
let peer = ctx.res.unembed(Cap).get
var client = newHttpClient()
try:
var
headers = newHttpHeaders()
contentType: string
for key, val in ctx.req.headers:
if key == Symbol"content-type" or key == Symbol"Content-Type":
contentType = val
client.headers[key.string] = val
let stdRes = client.request(
ctx.req.url,
ctx.req.`method`.string.toUpper,
ctx.req.body.toContent(contentType), headers
)
var resp = HttpResponse(orKind: HttpResponseKind.status)
resp.status.code = stdRes.status[0 .. 2].parseInt
resp.status.message = stdRes.status[4 .. ^1]
message(turn, peer, resp)
resp = HttpResponse(orKind: HttpResponseKind.header)
for key, vals in stdRes.headers.table:
for val in vals.items:
resp.header.name = key.Symbol
resp.header.value = val
message(turn, peer, resp)
if detail.`response-content-type-override` != "":
contentType = detail.`response-content-type-override`
else:
for val in stdRes.headers.table.getOrDefault("content-type").items:
contentType = val
case contentType
of "application/json", "text/preserves", "text/javascript":
message(turn, peer,
initRecord("done", stdRes.bodyStream.readAll.parsePreserves))
of "application/preserves":
message(turn, peer,
initRecord("done", stdRes.bodyStream.decodePreserves))
else:
resp = HttpResponse(orKind: HttpResponseKind.done)
resp.done.chunk.string = stdRes.bodyStream.readAll()
message(turn, peer, resp)
except CatchableError as err:
var resp = HttpResponse(orKind: HttpResponseKind.status)
resp.status.code = 400
resp.status.message = "Internal client error"
message(turn, peer, resp)
resp = HttpResponse(orKind: HttpResponseKind.done)
resp.done.chunk.string = err.msg
message(turn, peer, resp)
client.close()
do:
client.close()
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnHttpClient(turn, relay)
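
The `toContent` proc above picks a body encoding from the content type; a minimal standalone sketch rendering the same Preserves value all three ways (the value itself is made up, and every call here appears in the module above):

```
# Standalone sketch of the three encodings distinguished by toContent.
import std/streams
import preserves

let body = parsePreserves """{ "a": 1 }"""
var s = newStringStream()
writeText(s, body, textJson)        # JSON-flavoured text, for application/json
echo s.data
echo $body                          # canonical Preserves text, for text/preserves
echo cast[string](body.encode).len, " bytes"  # binary, for application/preserves
```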

View File

@ -1 +0,0 @@
define:ssl

114
src/inotify_actor.nim Normal file
View File

@ -0,0 +1,114 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## An actor for filesystem monitoring.
import std/[asyncdispatch, asyncfile, tables]
import posix, posix/inotify
import preserves
import syndicate, syndicate/[bags, relays]
import ./schema/inotify_actor
var IN_NONBLOCK {.importc, nodecl.}: cint
type
BootArgs {.preservesDictionary.} = object
dataspace: Cap
proc toMask(sym: Symbol): uint32 =
case sym.string
of "IN_ACCESS": IN_ACCESS
of "IN_MODIFY": IN_MODIFY
of "IN_ATTRIB": IN_ATTRIB
of "IN_CLOSE_WRITE": IN_CLOSE_WRITE
of "IN_CLOSE_NOWRITE": IN_CLOSE_NOWRITE
of "IN_CLOSE": IN_CLOSE
of "IN_OPEN": IN_OPEN
of "IN_MOVED_FROM": IN_MOVED_FROM
of "IN_MOVED_TO": IN_MOVED_TO
of "IN_MOVE": IN_MOVE
of "IN_CREATE": IN_CREATE
of "IN_DELETE": IN_DELETE
of "IN_DELETE_SELF": IN_DELETE_SELF
of "IN_MOVE_SELF": IN_MOVE_SELF
else: 0
func contains(event, bit: uint32): bool = (event and bit) != 0
iterator symbols(event: uint32): Symbol =
if event.contains IN_ACCESS:
yield Symbol"IN_ACCESS"
if event.contains IN_MODIFY:
yield Symbol"IN_MODIFY"
if event.contains IN_ATTRIB:
yield Symbol"IN_ATTRIB"
if event.contains IN_CLOSE_WRITE:
yield Symbol"IN_CLOSE_WRITE"
if event.contains IN_CLOSE_NOWRITE:
yield Symbol"IN_CLOSE_NOWRITE"
if event.contains IN_OPEN:
yield Symbol"IN_OPEN"
if event.contains IN_MOVED_FROM:
yield Symbol"IN_MOVED_FROM"
if event.contains IN_MOVED_TO:
yield Symbol"IN_MOVED_TO"
if event.contains IN_CREATE:
yield Symbol"IN_CREATE"
if event.contains IN_DELETE:
yield Symbol"IN_DELETE"
if event.contains IN_DELETE_SELF:
yield Symbol"IN_DELETE_SELF"
if event.contains IN_MOVE_SELF:
yield Symbol"IN_MOVE_SELF"
if event.contains (IN_CLOSE_WRITE or IN_CLOSE_NOWRITE):
yield Symbol"IN_CLOSE"
if event.contains (IN_MOVED_FROM or IN_MOVED_TO):
yield Symbol"IN_MOVE"
runActor("inotify_actor") do (root: Cap; turn: var Turn):
var buf = newSeq[byte](8192) # must be mutable: readBuffer takes the address of buf[0]
let eventPattern = ?Observe(pattern: !InotifyMessage) ?? { 0: grabLit(), 1: grabLit() }
connectStdio(turn, root)
during(turn, root, ?:BootArgs) do (ds: Cap):
let inf = inotify_init1(IN_NONBLOCK)
doAssert inf != -1, $inf & " - " & $strerror(errno)
var
registry = initTable[cint, string]()
watchBag: Bag[cint]
let
anf = newAsyncFile(AsyncFD inf)
facet = turn.facet
var fut: Future[int]
proc readEvents() {.gcsafe.} =
fut = readBuffer(anf, buf[0].addr, buf.len)
addCallback(fut, facet) do (turn: var Turn):
let n = read(fut)
doAssert n > 0
for event in inotify_events(buf[0].addr, n):
var msg = InotifyMessage(path: registry[event.wd], cookie: event.cookie.BiggestInt)
if event.len > 0:
let n = event.len
msg.name.setLen(n)
copyMem(msg.name[0].addr, event.name.addr, n)
for i, c in msg.name:
if c == '\0':
msg.name.setLen(i)
break
for sym in event.mask.symbols:
msg.event = sym
message(turn, ds, msg)
readEvents()
readEvents()
during(turn, ds, eventPattern) do (path: string, kind: Symbol):
let wd = inotify_add_watch(inf, path, kind.toMask or IN_MASK_ADD)
doAssert wd > 0, $strerror(errno)
registry[wd] = path
discard watchBag.change(wd, 1)
do:
if watchBag.change(wd, -1, clamp = true) == cdPresentToAbsent:
discard close(wd)
registry.del(wd)
do:
close(anf)
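
For orientation, a rough client sketch for this actor. The `BootArgs` assertion, the `InotifyMessage` field order (path first, event symbol second, as `eventPattern` above implies), and the availability of `newDataspace` in this API vintage are assumptions:

```
# Hypothetical client: ask the actor to watch /tmp for file creation.
import preserves, syndicate, syndicate/relays
import ./schema/inotify_actor

runActor("watch_tmp") do (turn: var Turn; root: Cap):
  connectStdio(turn, root)
  let ds = newDataspace(turn)   # assumed constructor; events arrive here
  discard publish(turn, root, BootArgs(dataspace: ds))
  # Observing with a literal path and event symbol is what makes the
  # actor register the inotify watch (see eventPattern above).
  let pat = InotifyMessage ?: { 0: ?"/tmp", 1: ?Symbol"IN_CREATE" }
  onMessage(turn, ds, pat) do ():
    stderr.writeLine "something was created in /tmp"
```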

View File

@ -7,8 +7,8 @@ when not defined(linux):
{.error: "this component only tested for Linux".}
import std/oserrors
import preserves, preserves/sugar
import syndicate
import preserves
import syndicate, syndicate/relays
import ./schema/mountpoints
type BootArgs {.preservesDictionary.} = object
@ -20,34 +20,25 @@ proc mount(source, target, fsType: cstring; flags: culong; data: pointer): cint
proc umount(target: cstring): cint {.importc, header: "<sys/mount.h>".}
## `umount(2)`
proc spawnMountActor*(turn: Turn; ds: Cap): Actor {.discardable.} =
spawnActor(turn, "mount_actor") do (turn: Turn):
let
targetPat = observePattern(!Mountpoint, { @[%1]: grabLit() })
sourcePat = observePattern(!Mountpoint, {
@[%0]: grabLit(),
@[%2]: grabLit(),
})
during(turn, ds, ?:BootArgs) do (ds: Cap):
during(turn, ds, targetPat) do (target: string):
during(turn, ds, sourcePat) do (source: string, fsType: string):
var mountpoint = Mountpoint(
source: source,
target: target,
`type`: fsType,
)
var rc = mount(source, target, fsType, 0, nil)
if rc == 0:
mountpoint.status = Status(orKind: StatusKind.success)
else:
mountpoint.status = Status(orKind: StatusKind.Failure)
mountpoint.status.failure.msg = osErrorMsg(osLastError())
discard publish(turn, ds, mountpoint)
do:
discard umount(target)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
discard spawnMountActor(turn, ds)
runActor("mount_actor") do (turn: var Turn; root: Cap):
let
targetPat = ?Observe(pattern: !Mountpoint) ?? { 1: grabLit() }
sourcePat = ?Observe(pattern: !Mountpoint) ?? { 0: grabLit(), 2: grabLit() }
connectStdio(turn, root)
during(turn, root, ?:BootArgs) do (ds: Cap):
during(turn, ds, targetPat) do (target: string):
during(turn, ds, sourcePat) do (source: string, fsType: string):
var mountpoint = Mountpoint(
source: source,
target: target,
`type`: fsType,
)
var rc = mount(source, target, fsType, 0, nil)
if rc == 0:
mountpoint.status = Status(orKind: StatusKind.success)
else:
mountpoint.status = Status(orKind: StatusKind.Failure)
mountpoint.status.failure.msg = osErrorMsg(osLastError())
discard publish(turn, ds, mountpoint)
do:
discard umount(target)

View File

@ -1,20 +1,20 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[sequtils, os, strutils]
import std/[asyncdispatch, sequtils, os]
import preserves, syndicate, syndicate/relays
runActor("msg") do (turn: Turn):
proc main =
let
route = envRoute()
data = map(commandLineParams(), parsePreserves)
cmd = paramStr(0).extractFilename.normalize
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
case cmd
of "assert":
for e in data:
publish(turn, ds, e)
else: # "msg"
discard bootDataspace("msg") do (turn: var Turn; root: Cap):
spawnRelays(turn, root)
resolve(turn, root, route) do (turn: var Turn; ds: Cap):
for e in data:
message(turn, ds, e)
sync(turn, ds) do (turn: Turn):
stopActor(turn)
for _ in 1..2: poll()
main()

167
src/net_mapper.nim Normal file
View File

@ -0,0 +1,167 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## A ping utility for Syndicate.
import std/[asyncdispatch, asyncnet, monotimes, nativesockets, net, os, strutils, tables, times]
import preserves
import syndicate, syndicate/relays
import ./schema/net_mapper
#[
var
SOL_IP {.importc, nodecl, header: "<sys/socket.h>".}: int
IP_TTL {.importc, nodecl, header: "<netinet/in.h>".}: int
]#
proc toPreservesHook(address: IpAddress): Value = toPreserves($address)
proc fromPreservesHook(address: var IpAddress; pr: Value): bool =
try:
if pr.isString:
address = parseIpAddress(pr.string)
result = true
except ValueError: discard
when isMainModule:
# verify that the hooks round-trip
var ip: IpAddress
assert fromPreservesHook(ip, toPreservesHook(ip))
type
IcmpHeader {.packed.} = object
`type`: uint8
code: uint8
checksum: uint16
IcmpEchoFields {.packed.} = object
header: IcmpHeader
identifier: array[2, byte]
sequenceNumber: uint16
IcmpEcho {.union.} = object
fields: IcmpEchoFields
buffer: array[8, uint8]
IcmpTypes = enum
icmpEchoReply = 0,
icmpEcho = 8,
proc initIcmpEcho(): IcmpEcho =
result.fields.header.`type` = uint8 icmpEcho
# doAssert urandom(result.fields.identifier) # Linux does this?
proc updateChecksum(msg: var IcmpEcho) =
var sum: uint32
msg.fields.header.checksum = 0
for n in cast[array[4, uint16]](msg.buffer): sum = sum + uint32(n)
while (sum and 0xffff0000'u32) != 0:
sum = (sum and 0xffff) + (sum shr 16)
msg.fields.header.checksum = not uint16(sum)
proc match(a, b: IcmpEchoFields): bool =
({a.header.type, b.header.type} == {uint8 icmpEcho, uint8 icmpEchoReply}) and
(a.header.code == b.header.code) and
(a.sequenceNumber == b.sequenceNumber)
type
Pinger = ref object
facet: Facet
ds: Cap
rtt: RoundTripTime
rttHandle: Handle
sum: Duration
count: int64
msg: IcmpEcho
socket: AsyncSocket
sad: Sockaddr_storage
sadLen: SockLen
interval: Duration
proc newPinger(address: IpAddress; facet: Facet; ds: Cap): Pinger =
result = Pinger(
facet: facet,
ds: ds,
rtt: RoundTripTime(address: $address),
msg: initIcmpEcho(),
socket: newAsyncSocket(AF_INET, SOCK_DGRAM, IPPROTO_ICMP, false, true),
interval: initDuration(milliseconds = 500))
toSockAddr(address, Port 0, result.sad, result.sadLen)
# setSockOptInt(getFd socket, SOL_IP, IP_TTL, _)
proc close(ping: Pinger) = close(ping.socket)
proc sqr(dur: Duration): Duration =
let us = dur.inMicroseconds
initDuration(microseconds = us * us)
proc update(ping: Pinger; dur: Duration) {.inline.} =
let secs = dur.inMicroseconds.float / 1_000_000.0
if ping.count == 0: (ping.rtt.minimum, ping.rtt.maximum) = (secs, secs)
elif secs < ping.rtt.minimum: ping.rtt.minimum = secs
elif secs > ping.rtt.maximum: ping.rtt.maximum = secs
ping.sum = ping.sum + dur
inc ping.count
ping.rtt.average = inMicroseconds(ping.sum div ping.count).float / 1_000_000.0
proc exchangeEcho(ping: Pinger) {.async.} =
inc ping.msg.fields.sequenceNumber
# updateChecksum(ping.msg) # Linux does this?
let
a = getMonoTime()
r = sendto(ping.socket.getFd,
unsafeAddr ping.msg.buffer[0], ping.msg.buffer.len, 0,
cast[ptr SockAddr](unsafeAddr ping.sad), # neckbeard loser API
ping.sadLen)
if r == -1'i32:
let osError = osLastError()
raiseOSError(osError)
while true:
var
(data, address, _) = await recvFrom(ping.socket, 128)
b = getMonoTime()
if address != $ping.rtt.address:
stderr.writeLine "want ICMP from ", ping.rtt.address, " but received from ", address, " instead"
elif data.len >= ping.msg.buffer.len:
let
period = b - a
resp = cast[ptr IcmpEcho](unsafeAddr data[0])
if match(ping.msg.fields, resp.fields):
update(ping, period)
return
else:
stderr.writeLine "ICMP mismatch"
else:
stderr.writeLine "reply data has a bad length ", data.len
proc kick(ping: Pinger) {.gcsafe.} =
if not ping.socket.isClosed:
addTimer(ping.interval.inMilliseconds.int, oneshot = true) do (fd: AsyncFD) -> bool:
let fut = exchangeEcho(ping)
fut.addCallback do ():
if fut.failed and ping.rttHandle != Handle(0):
ping.facet.run do (turn: var Turn):
retract(turn, ping.rttHandle)
reset ping.rttHandle
else:
ping.facet.run do (turn: var Turn):
replace(turn, ping.ds, ping.rttHandle, ping.rtt)
if ping.interval < initDuration(seconds = 20):
ping.interval = ping.interval * 2
kick(ping)
type Args {.preservesDictionary.} = object
dataspace: Cap
runActor("net_mapper") do (root: Cap; turn: var Turn):
connectStdio(turn, root)
let rttObserver = ?Observe(pattern: !RoundTripTime) ?? {0: grabLit()}
during(turn, root, ?:Args) do (ds: Cap):
during(turn, ds, rttObserver) do (address: IpAddress):
var ping: Pinger
if address.family == IpAddressFamily.IPv4:
ping = newPinger(address, turn.facet, ds)
kick(ping)
do:
if not ping.isNil: close(ping)
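
The folding loop in `updateChecksum` is the standard RFC 1071 internet checksum; a standalone run over four made-up 16-bit words shows the carry folding and final one's complement:

```
# RFC 1071 checksum over four 16-bit words (values are arbitrary).
import std/strutils

var sum: uint32
for w in [0x0800'u16, 0x0000, 0x1234, 0x0001]:
  sum = sum + uint32(w)
while (sum and 0xffff0000'u32) != 0:   # fold carries back into the low word
  sum = (sum and 0xffff) + (sum shr 16)
echo toHex(not uint16(sum))            # prints E5CA
```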

View File

@ -1,11 +0,0 @@
const
ESC* = "\x1b"
InitializePrinter* = ESC & "@"
CancelLine* = ESC & "\x18"
SelectBoldFont* = ESC & "E"
CancelBoldFont* = ESC & "F"
SelectItalicFont* = ESC & "4"
CanceItalicFont* = ESC & "5"
SelectSuperScript* = ESC & "S0"
SelectSubScript* = ESC & "S1"
CancelAltScript* = ESC & "T"

View File

@ -3,29 +3,40 @@
## See the rofi-script(5) manpage for documentation.
import std/[cmdline, envvars, strutils, tables]
import std/[asyncdispatch, cmdline, envvars, strutils, tables]
import preserves, syndicate, syndicate/relays
import ./schema/rofi
if getEnv("ROFI_OUTSIDE") == "":
quit("run this program in rofi")
proc main =
let
route = envRoute()
rofiPid = getEnv("ROFI_OUTSIDE")
if rofiPid == "":
quit("run this program in rofi")
runActor("rofi_script_actor") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
case paramCount()
of 0:
let pat = ?:Options
onPublish(turn, ds, pat) do (options: seq[string]):
stdout.writeLine options.join("\n")
quit()
runActor("rofi_script_actor") do (turn: var Turn; root: Cap):
let rootFacet = turn.facet
resolve(turn, root, route) do (turn: var Turn; ds: Cap):
case paramCount()
of 0:
let pat = ?:Options
onPublish(turn, ds, pat) do (options: seq[string]):
stdout.writeLine options.join("\n")
quit()
of 1:
var select = Select(option: commandLineParams()[0])
for (key, val) in envPairs():
if key.startsWith "ROFI_":
select.environment[Symbol key] = val
message(turn, ds, select)
sync(turn, ds, stopActor)
of 1:
var select = Select(option: commandLineParams()[0])
for (key, val) in envPairs():
if key.startsWith "ROFI_":
select.environment[Symbol key] = val
message(turn, ds, select)
# TODO: sync not implemented correctly
# sync(turn, ds, stopActor)
callSoon do ():
waitFor sleepAsync(1)
quit()
else:
quit("rofi passed an unexpected number of arguments")
else:
quit("rofi passed an unexpected number of arguments")
main()
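
The other half of this script is an actor that publishes the menu and reacts to selections; a rough sketch (the `Options` and `Select` field names are read off the generated schema and may differ):

```
# Hypothetical menu provider for the rofi script above.
import std/tables
import preserves, syndicate, syndicate/relays
import ./schema/rofi

runActor("menu_provider") do (turn: var Turn; root: Cap):
  connectStdio(turn, root)
  discard publish(turn, root, Options(options: @["open", "quit"]))
  onMessage(turn, root, ?:Select) do (option: string, env: Table[Symbol, string]):
    stderr.writeLine "rofi selected: ", option
```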

View File

@ -1,2 +1,2 @@
include_rules
: foreach ../../*.prs |> !preserves-schema-nim |> %B.nim | $(PROJECT_DIR)/<schema>
: foreach ../../*.prs |> !preserves_schema_nim |> %B.nim | ../<schema>

View File

@ -1,19 +0,0 @@
import
preserves
type
Base64File* {.preservesRecord: "base64-file".} = object
`txt`*: string
`path`*: string
`size`*: BiggestInt
Base64Text* {.preservesRecord: "base64".} = object
`txt`*: string
`bin`*: seq[byte]
proc `$`*(x: Base64File | Base64Text): string =
`$`(toPreserves(x))
proc encode*(x: Base64File | Base64Text): seq[byte] =
encode(toPreserves(x))

View File

@ -1,10 +1,14 @@
import
preserves
preserves, std/tables
type
PulseStep* {.preservesRecord: "pulse".} = object
`detail`*: PulseDetail
WebsocketArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
`url`*: string
WebsocketArguments* {.preservesRecord: "websocket".} = object
`field0`*: WebsocketArgumentsField0
JsonTranslatorArgumentsField0* {.preservesDictionary.} = object
`argv`*: seq[string]
@ -13,34 +17,15 @@ type
JsonTranslatorArguments* {.preservesRecord: "json-stdio-translator".} = object
`field0`*: JsonTranslatorArgumentsField0
SocketAddressKind* {.pure.} = enum
`TcpAddress`, `UnixAddress`
`SocketAddress`* {.preservesOr.} = object
case orKind*: SocketAddressKind
of SocketAddressKind.`TcpAddress`:
`tcpaddress`*: TcpAddress
JsonTranslatorConnected* {.preservesRecord: "connected".} = object
`path`*: string
of SocketAddressKind.`UnixAddress`:
`unixaddress`*: UnixAddress
PulseDetail* {.preservesDictionary.} = object
`dither`*: float
`interval`*: float
`period`*: float
`target`* {.preservesEmbedded.}: EmbeddedRef
Base64DecoderArgumentsField0* {.preservesDictionary.} = object
JsonSocketTranslatorArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
`socket`*: string
Base64DecoderArguments* {.preservesRecord: "base64-decoder".} = object
`field0`*: Base64DecoderArgumentsField0
SqliteStepField0* {.preservesDictionary.} = object
`database`*: string
SqliteStep* {.preservesRecord: "sqlite".} = object
`field0`*: SqliteStepField0
JsonSocketTranslatorArguments* {.preservesRecord: "json-socket-translator".} = object
`field0`*: JsonSocketTranslatorArgumentsField0
XsltArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
@ -48,17 +33,12 @@ type
XsltArguments* {.preservesRecord: "xslt".} = object
`field0`*: XsltArgumentsField0
HttpClientStepDetail* {.preservesDictionary.} = object
`response-content-type-override`*: string
WebhooksArgumentsField0* {.preservesDictionary.} = object
`endpoints`*: Table[seq[string], EmbeddedRef]
`listen`*: Tcp
FileSystemDetail* {.preservesDictionary.} = object
`root`*: string
JsonSocketTranslatorStepField0* {.preservesDictionary.} = object
`socket`*: SocketAddress
JsonSocketTranslatorStep* {.preservesRecord: "json-socket-translator".} = object
`field0`*: JsonSocketTranslatorStepField0
WebhooksArguments* {.preservesRecord: "webhooks".} = object
`field0`*: WebhooksArgumentsField0
FileSystemUsageArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
@ -66,23 +46,12 @@ type
FileSystemUsageArguments* {.preservesRecord: "file-system-usage".} = object
`field0`*: FileSystemUsageArgumentsField0
HttpClientStep* {.preservesRecord: "http-client".} = object
`detail`*: HttpClientStepDetail
SqliteArgumentsField0* {.preservesDictionary.} = object
`database`*: string
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
HttpDriverStepField0* {.preservesDictionary.} = object
HttpDriverStep* {.preservesRecord: "http-driver".} = object
`field0`*: HttpDriverStepField0
PostgreStepField0* {.preservesDictionary.} = object
`connection`*: seq[PostgreConnectionParameter]
PostgreStep* {.preservesRecord: "postgre".} = object
`field0`*: PostgreStepField0
TcpAddress* {.preservesRecord: "tcp".} = object
`host`*: string
`port`*: BiggestInt
SqliteArguments* {.preservesRecord: "sqlite".} = object
`field0`*: SqliteArgumentsField0
CacheArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
@ -101,6 +70,13 @@ type
`key`*: string
`val`*: string
PostgreArgumentsField0* {.preservesDictionary.} = object
`connection`*: seq[PostgreConnectionParameter]
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
PostgreArguments* {.preservesRecord: "postgre".} = object
`field0`*: PostgreArgumentsField0
PulseArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
@ -111,58 +87,32 @@ type
`host`*: string
`port`*: BiggestInt
FileSystemStep* {.preservesRecord: "file-system".} = object
`detail`*: FileSystemDetail
UnixAddress* {.preservesRecord: "unix".} = object
`path`*: string
PrinterStepField0* {.preservesDictionary.} = object
PrinterStep* {.preservesRecord: "printer".} = object
`field0`*: PrinterStepField0
proc `$`*(x: PulseStep | JsonTranslatorArguments | SocketAddress | PulseDetail |
Base64DecoderArguments |
SqliteStep |
proc `$`*(x: WebsocketArguments | JsonTranslatorArguments |
JsonTranslatorConnected |
JsonSocketTranslatorArguments |
XsltArguments |
HttpClientStepDetail |
FileSystemDetail |
JsonSocketTranslatorStep |
WebhooksArguments |
FileSystemUsageArguments |
HttpClientStep |
HttpDriverStep |
PostgreStep |
TcpAddress |
SqliteArguments |
CacheArguments |
XmlTranslatorArguments |
PostgreConnectionParameter |
PostgreArguments |
PulseArguments |
Tcp |
FileSystemStep |
UnixAddress |
PrinterStep): string =
Tcp): string =
`$`(toPreserves(x))
proc encode*(x: PulseStep | JsonTranslatorArguments | SocketAddress |
PulseDetail |
Base64DecoderArguments |
SqliteStep |
proc encode*(x: WebsocketArguments | JsonTranslatorArguments |
JsonTranslatorConnected |
JsonSocketTranslatorArguments |
XsltArguments |
HttpClientStepDetail |
FileSystemDetail |
JsonSocketTranslatorStep |
WebhooksArguments |
FileSystemUsageArguments |
HttpClientStep |
HttpDriverStep |
PostgreStep |
TcpAddress |
SqliteArguments |
CacheArguments |
XmlTranslatorArguments |
PostgreConnectionParameter |
PostgreArguments |
PulseArguments |
Tcp |
FileSystemStep |
UnixAddress |
PrinterStep): seq[byte] =
Tcp): seq[byte] =
encode(toPreserves(x))

View File

@ -1,23 +0,0 @@
import
preserves
type
Read* {.preservesRecord: "read".} = object
`path`*: string
`offset`*: BiggestInt
`count`*: BiggestInt
`sink`* {.preservesEmbedded.}: EmbeddedRef
Write* {.preservesRecord: "Write".} = object
`path`*: string
`offset`*: BiggestInt
`count`*: BiggestInt
`data`*: seq[byte]
`written`* {.preservesEmbedded.}: EmbeddedRef
proc `$`*(x: Read | Write): string =
`$`(toPreserves(x))
proc encode*(x: Read | Write): seq[byte] =
encode(toPreserves(x))

View File

@ -4,15 +4,11 @@ import
type
Query* {.preservesRecord: "query".} = object
`statement`*: seq[Value]
`statement`*: string
`target`* {.preservesEmbedded.}: Value
SqlError* {.preservesRecord: "sql-error".} = object
`msg`*: string
`context`*: string
proc `$`*(x: Query | SqlError): string =
proc `$`*(x: Query): string =
`$`(toPreserves(x))
proc encode*(x: Query | SqlError): seq[byte] =
proc encode*(x: Query): seq[byte] =
encode(toPreserves(x))

View File

@ -1,156 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
pkg/preserves,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
./schema/[config, sql]
# Avoid Sqlite3 from the standard library because it is
# only held together by wishful thinking and dlload.
{.passC: staticExec"$PKG_CONFIG --cflags sqlite3".}
{.passL: staticExec"$PKG_CONFIG --libs sqlite3".}
{.pragma: sqlite3h, header: "sqlite3.h".}
var
SQLITE_VERSION_NUMBER {.importc, sqlite3h.}: cint
SQLITE_OK {.importc, sqlite3h.}: cint
SQLITE_ROW {.importc, sqlite3h.}: cint
SQLITE_DONE {.importc, sqlite3h.}: cint
SQLITE_OPEN_READONLY {.importc, sqlite3h.}: cint
const
SQLITE_INTEGER = 1
SQLITE_FLOAT = 2
SQLITE_TEXT = 3
SQLITE_BLOB = 4
# SQLITE_NULL = 5
type
Sqlite3 {.importc: "sqlite3", sqlite3h.} = distinct pointer
Stmt {.importc: "sqlite3_stmt", sqlite3h.} = distinct pointer
{.pragma: importSqlite3, importc: "sqlite3_$1", sqlite3h.}
proc libversion_number: cint {.importSqlite3.}
proc open_v2(filename: cstring; ppDb: ptr Sqlite3; flags: cint; zVfs: cstring): cint {.importSqlite3.}
proc close(ds: Sqlite3): int32 {.discardable, importSqlite3.}
proc errmsg(db: Sqlite3): cstring {.importSqlite3.}
proc prepare_v2(db: Sqlite3; zSql: cstring, nByte: cint; ppStmt: ptr Stmt; pzTail: ptr cstring): cint {.importSqlite3.}
proc step(para1: Stmt): cint {.importSqlite3.}
proc column_count(stmt: Stmt): int32 {.importSqlite3.}
proc column_blob(stmt: Stmt; col: cint): pointer {.importSqlite3.}
proc column_bytes(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc column_double(stmt: Stmt; col: cint): float64 {.importSqlite3.}
proc column_int64(stmt: Stmt; col: cint): int64 {.importSqlite3.}
proc column_text(stmt: Stmt; col: cint): cstring {.importSqlite3.}
proc column_type(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc finalize(stmt: Stmt): cint {.importSqlite3.}
doAssert libversion_number() == SQLITE_VERSION_NUMBER
proc assertError(facet: Facet; cap: Cap; db: Sqlite3; context: string) =
run(facet) do (turn: Turn):
publish(turn, cap, SqlError(
msg: $errmsg(db),
context: context,
))
proc assertError(facet: Facet; cap: Cap; msg, context: string) =
run(facet) do (turn: Turn):
publish(turn, cap, SqlError(
msg: msg,
context: context,
))
proc extractValue(stmt: Stmt; col: cint): Value =
case column_type(stmt, col)
of SQLITE_INTEGER:
result = toPreserves(column_int64(stmt, col))
of SQLITE_FLOAT:
result = toPreserves(column_double(stmt, col))
of SQLITE_TEXT:
result = Value(kind: pkString, string: newString(column_bytes(stmt, col)))
if result.string.len > 0:
copyMem(addr result.string[0], column_text(stmt, col), result.string.len)
of SQLITE_BLOB:
result = Value(kind: pkByteString, bytes: newSeq[byte](column_bytes(stmt, col)))
if result.bytes.len > 0:
copyMem(addr result.bytes[0], column_blob(stmt, col), result.bytes.len)
else:
result = initRecord("null")
proc extractTuple(stmt: Stmt; arity: cint): Value =
result = initSequence(arity)
for col in 0..<arity: result[col] = extractValue(stmt, col)
proc renderSql(tokens: openarray[Value]): string =
for token in tokens:
if result.len > 0: result.add ' '
case token.kind
of pkSymbol:
result.add token.symbol.string
of pkString:
result.add '\''
result.add token.string
result.add '\''
of pkFloat, pkRegister, pkBigInt:
result.add $token
of pkBoolean:
if token.bool: result.add '1'
else: result.add '0'
else:
return ""
proc spawnSqliteActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
result = spawnActor(turn, "sqlite") do (turn: Turn):
let pat = Resolve?:{ 0: SqliteStep.grabTypeFlat, 1: grab() }
during(turn, relay, pat) do (path: string, observer: Cap):
linkActor(turn, path) do (turn: Turn):
let facet = turn.facet
stderr.writeLine("opening SQLite database ", path)
var db: Sqlite3
if open_v2(path, addr db, SQLITE_OPEN_READONLY, nil) != SQLITE_OK:
discard publish(turn, observer,
Rejected(detail: toPreserves($errmsg(db))))
else:
turn.onStop do (turn: Turn):
close(db)
stderr.writeLine("closed SQLite database ", path)
let ds = turn.newDataspace()
discard publish(turn, observer,
ResolvedAccepted(responderSession: ds))
during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
var
stmt: Stmt
text = renderSql statement
if text == "":
assertError(facet, target, "invalid statement", $statement)
elif prepare_v2(db, text, text.len.cint, addr stmt, nil) != SQLITE_OK:
assertError(facet, target, db, text)
else:
try:
let arity = column_count(stmt)
var res = step(stmt)
while res == SQLITE_ROW:
var rec = extractTuple(stmt, arity)
discard publish(turn, target, rec)
res = step(stmt)
assert res != 100 # 100 is SQLITE_ROW; the loop above runs until step returns something else
if res != SQLITE_DONE:
assertError(facet, target, db, text)
finally:
if finalize(stmt) != SQLITE_OK: assertError(facet, target, db, text)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnSqliteActor(turn, relay)
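
`renderSql` flattens a token sequence into SQL text, quoting strings and mapping booleans to 1/0; a standalone demonstration (the logic is reproduced from the actor above so the snippet runs alone):

```
# Standalone demo of the token-to-SQL rendering above.
import preserves

proc renderSql(tokens: openarray[Value]): string =
  for token in tokens:
    if result.len > 0: result.add ' '
    case token.kind
    of pkSymbol: result.add token.symbol.string
    of pkString:
      result.add '\''
      result.add token.string
      result.add '\''
    of pkFloat, pkRegister, pkBigInt: result.add $token
    of pkBoolean: result.add(if token.bool: '1' else: '0')
    else: return ""

echo renderSql [toSymbol"select", toSymbol"*", toSymbol"from",
                toSymbol"users", toSymbol"where", toSymbol"id",
                toSymbol"=", 1.toPreserves]
# prints: select * from users where id = 1
```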

View File

@ -3,28 +3,42 @@
## Syndicate multitool.
import syndicate, syndicate/relays, syndicate/drivers/timers
import syndicate, syndicate/relays, syndicate/actors/timers
const
withPostgre* {.booldefine.}: bool = true
withSqlite* {.booldefine.}: bool = true
import ./syndesizer/[
base64_decoder,
cache_actor,
file_systems,
file_system_usage,
http_driver,
json_socket_translator,
json_translator,
pulses,
xml_translator]
webhooks,
websockets,
xml_translator,
xslt_actor]
runActor("syndesizer") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
discard spawnTimerDriver(turn, relay)
discard spawnBase64Decoder(turn, relay)
discard spawnCacheActor(turn, relay)
discard spawnFileSystemActor(turn, relay)
discard spawnFileSystemUsageActor(turn, relay)
discard spawnHttpDriver(turn, relay)
discard spawnJsonSocketTranslator(turn, relay)
discard spawnJsonStdioTranslator(turn, relay)
discard spawnPulseActor(turn, relay)
discard spawnXmlTranslator(turn, relay)
when withPostgre:
import ./syndesizer/postgre_actor
when withSqlite:
import ./syndesizer/sqlite_actor
runActor("syndesizer") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnTimers(turn, root)
discard spawnCacheActor(turn, root)
discard spawnFileSystemUsageActor(turn, root)
discard spawnJsonSocketTranslator(turn, root)
discard spawnJsonStdioTranslator(turn, root)
discard spawnPulseActor(turn, root)
discard spawnWebhookActor(turn, root)
discard spawnWebsocketActor(turn, root)
discard spawnXmlTranslator(turn, root)
discard spawnXsltActor(turn, root)
when withPostgre:
discard spawnPostgreActor(turn, root)
when withSqlite:
discard spawnSqliteActor(turn, root)

View File

@ -1,3 +1,2 @@
include_rules
: foreach *.nim |> !nim_bin |> {bin}
: foreach {bin} |> !assert_built |>
: foreach *.nim | $(SYNDICATE_PROTOCOL) ../<schema> |> !nim_check |> | ./<checks>

View File

@ -1,50 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[base64, os],
pkg/nimcrypto/blake2,
preserves, preserves/sugar, syndicate,
../schema/config,
../schema/base64 as schema
export Base64DecoderArguments
export schema
proc spawnBase64Decoder*(turn: Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "base64-decoder") do (turn: Turn):
let tmpDir = getTempDir()
during(turn, root, ?:Base64DecoderArguments) do (ds: Cap):
let decTextPat = observePattern(!Base64Text, { @[%0]: grabLit() })
during(turn, ds, decTextPat) do (txt: string):
discard publish(turn, ds, Base64Text(
txt: txt,
bin: cast[seq[byte]](decode(txt)),
))
let encTextPat = observePattern(!Base64Text, { @[%1]: grabLit() })
during(turn, ds, encTextPat) do (bin: seq[byte]):
discard publish(turn, ds, Base64Text(
txt: encode(bin),
bin: bin,
))
let decFilePat = observePattern( !Base64File, { @[%0]: grabLit() })
during(turn, ds, decFilePat) do (txt: string):
var
bin = decode(txt)
digest = $blake2_256.digest(bin)
path = tmpDir / digest
writeFile(path, bin)
discard publish(turn, ds, Base64File(
txt: txt,
path: path,
size: bin.len,
))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
spawnBase64Decoder(turn, ds)

View File

@ -3,8 +3,8 @@
import std/times
import preserves, syndicate,
syndicate/durings,
syndicate/drivers/timers
syndicate/[durings, relays],
syndicate/actors/timers
import ../schema/config
@ -18,9 +18,9 @@ type CacheEntity {.final.} = ref object of Entity
pattern: Pattern
lifetime: float64
method publish(cache: CacheEntity; turn: Turn; ass: AssertionRef; h: Handle) =
method publish(cache: CacheEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
## Re-assert pattern captures in a sub-facet.
discard inFacet(turn) do (turn: Turn):
discard inFacet(turn) do (turn: var Turn):
# TODO: a separate facet for every assertion, too much?
var ass = depattern(cache.pattern, ass.value.sequence)
# Build an assertion with what we have of the pattern and capture.
@ -30,12 +30,12 @@ method publish(cache: CacheEntity; turn: Turn; ass: AssertionRef; h: Handle) =
stop(turn) # end this facet
proc isObserve(pat: Pattern): bool =
pat.orKind == PatternKind.group and
pat.group.type.orKind == GroupTypeKind.rec and
pat.group.type.rec.label.isSymbol"Observe"
pat.orKind == PatternKind.DCompound and
pat.dcompound.orKind == DCompoundKind.rec and
pat.dcompound.rec.label.isSymbol"Observe"
proc spawnCacheActor*(turn: Turn; root: Cap): Actor =
spawnActor(turn, "cache_actor") do (turn: Turn):
proc spawnCacheActor*(turn: var Turn; root: Cap): Actor =
spawn("cache_actor", turn) do (turn: var Turn):
during(turn, root, ?:CacheArguments) do (ds: Cap, lifetime: float64):
onPublish(turn, ds, ?:Observe) do (pat: Pattern, obs: Cap):
var cache: CacheEntity
@ -51,8 +51,7 @@ proc spawnCacheActor*(turn: Turn; root: Cap): Actor =
discard observe(turn, ds, pat, cache)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
discard spawnTimerDriver(turn, ds)
discard spawnCacheActor(turn, ds)
runActor("cache_actor") do (turn: var Turn; root: Cap):
spawnTimers(turn, root)
connectStdio(turn, root)
discard spawnCacheActor(turn, root)

View File

@ -2,15 +2,15 @@
# SPDX-License-Identifier: Unlicense
import std/[dirs, os, paths]
import preserves, preserves/sugar
import syndicate
import preserves
import syndicate, syndicate/relays
import ../schema/[assertions, config]
proc spawnFileSystemUsageActor*(turn: Turn; root: Cap): Actor {.discardable.} =
spawn("file-system-usage", turn) do (turn: Turn):
proc spawnFileSystemUsageActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawn("file-system-usage", turn) do (turn: var Turn):
during(turn, root, ?:FileSystemUsageArguments) do (ds: Cap):
let pat = observePattern(!FileSystemUsage, { @[%0]: grab() })
var pat = ?Observe(pattern: !FileSystemUsage) ?? { 0: grab() }
during(turn, ds, pat) do (lit: Literal[string]):
var ass = FileSystemUsage(path: lit.value)
if fileExists(ass.path): ass.size = getFileSize(ass.path)
@ -22,7 +22,6 @@ proc spawnFileSystemUsageActor*(turn: Turn; root: Cap): Actor {.discardable.} =
# TODO: updates?
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
discard spawnFileSystemUsageActor(turn, ds)
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnFileSystemUsageActor(turn, root)

View File

@ -1,101 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[cmdline, oserrors, posix],
pkg/sys/[files, ioqueue],
pkg/preserves,
pkg/syndicate,
pkg/syndicate/protocols/gatekeeper,
pkg/syndicate/drivers/timers
from std/os import `/`
import ../schema/[config, file_system]
from pkg/sys/handles import FD
proc echo(args: varargs[string, `$`]) {.used.} =
stderr.writeLine(args)
proc stopForOsError(turn: Turn; cap: Cap) =
message(turn, cap, initRecord("error", osLastError().osErrorMsg().toPreserves))
turn.stopFacet()
proc stopAsOkay(turn: Turn; cap: Cap) =
message(turn, cap, initRecord"ok")
turn.stopFacet()
const iounit = 0x1000
type Buffer = ref seq[byte]
proc newBuffer(n: int): Buffer =
new result
if n < 0: result[].setLen iounit
else: result[].setLen min(n, iounit)
proc read(facet: Facet; file: AsyncFile; count: BiggestInt; buf: Buffer; dst: Cap)
proc readAsync(facet: Facet; file: AsyncFile; count: BiggestInt; buf: Buffer; dst: Cap) {.asyncio.} =
# TODO: optimise
assert count != 0
let n = file.read(buf)
proc deliver(turn: Turn) {.closure.} =
case n
of -1:
turn.stopForOsError(dst)
else:
if n < buf[].len:
buf[].setLen(n)
if n > 0:
message(turn, dst, buf[])
turn.stopAsOkay(dst)
else:
message(turn, dst, buf[])
var count = count
if count != -1:
count = count - n
read(facet, file, count, buf, dst)
facet.run(deliver)
proc read(facet: Facet; file: AsyncFile; count: BiggestInt; buf: Buffer; dst: Cap) =
discard trampoline:
whelp readAsync(facet, file, count, buf, dst)
proc read(facet: Facet; file: AsyncFile; count: BiggestInt; dst: Cap) =
## Call read with a reusable buffer.
read(facet, file, count, newBuffer(count.int), dst)
proc serve(turn: Turn; detail: FileSystemDetail; ds: Cap) =
during(turn, ds, Read.grabType) do (op: Read):
let dst = op.sink.Cap
let fd = posix.open(detail.root / op.path, O_RDONLY or O_NONBLOCK, 0)
if fd < 0:
turn.stopForOsError(dst)
else:
if op.count == 0:
discard close(fd)
message(turn, dst, initRecord"ok")
turn.facet.stop()
elif posix.lseek(fd, op.offset, SEEK_SET) < 0:
discard close(fd)
turn.stopForOsError(dst)
else:
# fd is hopefully automagically closed.
turn.facet.read(fd.FD.newAsyncFile, op.count, dst)
proc spawnFileSystemActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
spawnActor(turn, "file-system") do (turn: Turn):
let resolvePat = Resolve?:{ 0: FileSystemStep.grabWithin, 1: grab() }
during(turn, relay, resolvePat) do (detail: FileSystemDetail; observer: Cap):
let ds = turn.newDataspace()
serve(turn, detail, ds)
discard publish(turn, observer, ResolvedAccepted(responderSession: ds))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnFileSystemActor(turn, relay)
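
The `Read` protocol is easiest to see from the requesting side; a rough sketch, assuming `root` is already the responder session obtained through the gatekeeper, that `newDataspace` is available here, and that a `Cap` may be assigned to the schema's `EmbeddedRef` sink:

```
# Hypothetical read request against the file-system actor above.
import preserves, syndicate, syndicate/relays
import ../schema/file_system

runActor("reader") do (turn: var Turn; root: Cap):
  connectStdio(turn, root)
  let sink = newDataspace(turn)  # chunks and the final <ok>/<error …> land here
  discard publish(turn, root, Read(
    path: "etc/motd", offset: 0, count: 4096, sink: sink))
  onMessage(turn, sink, grab()) do (v: Value):
    stderr.writeLine v
```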

View File

@ -1,53 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## Thin wrapper over `syndicate/drivers/http_driver`.
import
pkg/taps,
pkg/preserves,
pkg/syndicate,
pkg/syndicate/drivers/http_driver,
pkg/syndicate/protocols/[gatekeeper, sturdy],
../schema/config
proc spawnHttpDriver*(turn: Turn; relay: Cap): Actor {.discardable.} =
## Create a dataspace for the driver and do the gatekeeper dance.
spawnActor(turn, "http-driver") do (turn: Turn):
let pat = Resolve?:{ 0: HttpDriverStep.matchType }
during(turn, relay, pat):
let ds = turn.newDataspace()
http_driver.spawnHttpDriver(turn, ds)
# Spawn a shared driver.
let pat = Resolve?:{ 0: HttpDriverStep.matchType, 1: grab() }
during(turn, relay, pat) do (obs: Cap):
discard publish(turn, obs, ResolvedAccepted(responderSession: ds))
# Pass the shared driver dataspace.
when isMainModule:
import syndicate/relays
when defined(solo5):
import solo5
acquireDevices([("eth0", netBasic)], netAcquireHook)
proc envRoute: Route =
var pr = parsePreserves $solo5_start_info.cmdline
if result.fromPreserves pr:
return
elif pr.isSequence:
for e in pr:
if result.fromPreserves e:
return
quit("failed to parse command line for route to Syndicate gatekeeper")
runActor("main") do (turn: Turn):
let relay = newDataspace(turn)
spawnRelays(turn, relay)
resolve(turn, relay, envRoute(), spawnHttpDriver)
else:
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnHttpDriver(turn, relay)

View File

@ -1,2 +0,0 @@
define:ipv6Enabled
include:"std/assertions"

View File

@ -1,82 +1,39 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[json, options],
pkg/sys/[ioqueue, sockets],
pkg/preserves, pkg/preserves/jsonhooks,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
../schema/[config, json_messages]
import std/[asyncdispatch, asyncnet, json]
from std/nativesockets import AF_UNIX, SOCK_STREAM, Protocol
import preserves, preserves/jsonhooks, syndicate, syndicate/relays
template translateSocketBody {.dirty.} =
# Template workaround for CPS and parameterized types.
var
guard = initGuard(facet)
dec = newBufferedDecoder(0)
buf = new string #TODO: get a pointer into the decoder
alive = true
proc kill(turn: Turn) =
alive = false
proc setup(turn: Turn) =
# Closure, not CPS.
onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
if alive:
discard trampoline:
whelp write(socket[], $data & "\n")
else:
stderr.writeLine "dropped send of ", data
discard publish(turn, observer, ResolvedAccepted(responderSession: ds))
# Resolve the <json-socket-translator { }> step.
onStop(facet, kill)
run(facet, setup)
while alive:
# TODO: parse buffer
buf[].setLen(0x4000)
let n = read(socket[], buf)
if n < 1:
stderr.writeLine "socket read returned ", n
else:
buf[].setLen(n)
dec.feed(buf[])
var data = dec.parse()
if data.isSome:
proc send(turn: Turn) =
# Closure, not CPS.
message(turn, ds, initRecord("recv", data.get))
run(facet, send)
stderr.writeLine "close socket ", sa
close(socket[])
import ../schema/config, ../json_messages
proc translateSocket(facet: Facet; sa: TcpAddress; ds, observer: Cap) {.asyncio.} =
var
socket = new AsyncConn[Protocol.Tcp]
conn = connectTcpAsync(sa.host, Port sa.port)
socket[] = conn
translateSocketBody()
proc spawnJsonSocketTranslator*(turn: var Turn; root: Cap): Actor =
spawn("json-socket-translator", turn) do (turn: var Turn):
during(turn, root, ?:JsonSocketTranslatorArguments) do (ds: Cap, socketPath: string):
let socket = newAsyncSocket(
domain = AF_UNIX,
sockType = SOCK_STREAM,
protocol = cast[Protocol](0),
buffered = false,
)
addCallback(connectUnix(socket, socketPath), turn) do (turn: var Turn):
let a = JsonTranslatorConnected(path: socketPath)
discard publish(turn, ds, a)
proc translateSocket(facet: Facet; sa: UnixAddress; ds, observer: Cap) {.asyncio.} =
var
socket = new AsyncConn[Protocol.Unix]
conn = connectUnixAsync(sa.path)
socket[] = conn
translateSocketBody()
let socketFacet = turn.facet
proc processOutput(fut: Future[string]) {.gcsafe.} =
run(socketFacet) do (turn: var Turn):
var data = fut.read.parseJson
message(turn, ds, RecvJson(data: data))
socket.recvLine.addCallback(processOutput)
socket.recvLine.addCallback(processOutput)
proc spawnJsonSocketTranslator*(turn: Turn; relay: Cap): Actor {.discardable.} =
let pat = Resolve?:{ 0: JsonSocketTranslatorStep.grabTypeFlat, 1: grab() }
spawnActor(turn, "json-socket-translator") do (turn: Turn):
during(turn, relay, pat) do (sa: TcpAddress, observer: Cap):
linkActor(turn, "json-socket-translator") do (turn: Turn):
let ds = turn.newDataspace()
discard trampoline:
whelp translateSocket(turn.facet, sa, ds, observer)
during(turn, relay, pat) do (sa: UnixAddress, observer: Cap):
linkActor(turn, "json-socket-translator") do (turn: Turn):
let ds = turn.newDataspace()
discard trampoline:
whelp translateSocket(turn.facet, sa, ds, observer)
onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
asyncCheck(turn, send(socket, $data & "\n"))
do:
close(socket)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnJsonSocketTranslator(turn, relay)
runActor("json_socket_translator") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnJsonSocketTranslator(turn, root)
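
From the client side the translator is driven with `SendJson`/`RecvJson` messages once the `<connected …>` assertion appears; a rough sketch (the socket path and command are placeholders, and the generated `field0` nesting is assumed):

```
# Hypothetical client for the JSON socket translator above.
import std/json
import preserves, syndicate, syndicate/relays
import ../schema/config, ../json_messages

runActor("json_client") do (turn: var Turn; root: Cap):
  connectStdio(turn, root)
  let ds = newDataspace(turn)
  discard publish(turn, root, JsonSocketTranslatorArguments(
    field0: JsonSocketTranslatorArgumentsField0(
      dataspace: ds, socket: "/run/example.sock")))
  during(turn, ds, ?:JsonTranslatorConnected) do (path: string):
    message(turn, ds, SendJson(data: %*{"command": ["get_version"]}))
  onMessage(turn, ds, ?:RecvJson) do (data: JsonNode):
    stderr.writeLine data
```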

View File

@ -3,9 +3,10 @@
import std/[json, osproc]
import preserves
import syndicate
import syndicate, syndicate/relays
import ../schema/[config, json_messages]
import ../schema/config
import ../json_messages
proc runChild(params: seq[string]): string =
if params.len < 1:
@ -19,15 +20,14 @@ proc runChild(params: seq[string]): string =
if result == "":
stderr.writeLine "no ouput"
proc spawnJsonStdioTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "json-stdio-translator") do (turn: Turn):
proc spawnJsonStdioTranslator*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawn("json-stdio-translator", turn) do (turn: var Turn):
during(turn, root, ?:JsonTranslatorArguments) do (argv: seq[string], ds: Cap):
var js = parseJson(runChild(argv))
message(turn, ds, RecvJson(data: js))
discard publish(turn, ds, RecvJson(data: js))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
spawnJsonStdioTranslator(turn, ds)
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnJsonStdioTranslator(turn, root)

View File

@ -1,10 +1,9 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
pkg/preserves,
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
./schema/[config, sql]
import preserves, syndicate, syndicate/relays
import ../schema/[config, sql]
{.passL: "-lpq".}
@ -89,75 +88,44 @@ proc splitParams(params: StringPairs): (cstringArray, cstringArray) =
for i, _ in params: strings[i] = params[i][1]
result[1] = allocCStringArray(strings)
proc renderSql(tokens: openarray[Value]): string =
for token in tokens:
if result.len > 0: result.add ' '
case token.kind
of pkSymbol:
result.add token.symbol.string
of pkString:
result.add '\''
result.add token.string
result.add '\''
of pkFloat, pkRegister, pkBigInt:
result.add $token
of pkBoolean:
if token.bool: result.add '1'
else: result.add '0'
else:
return ""
proc spawnPostgreActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
result = spawnActor(turn, "postgre") do (turn: Turn):
let pat = Resolve?:{ 0: PostgreStep.grabTypeFlat, 1: grab() }
during(turn, relay, pat) do (params: StringPairs, observer: Cap):
linkActor(turn, "postgre-conn") do (turn: Turn):
var
(keys, vals) = splitParams(params)
conn = PQconnectdbParams(keys, vals, 0)
checkPointer(conn)
let
status = PQstatus(conn)
msg = $PQerrorMessage(conn)
deallocCStringArray(keys)
deallocCStringArray(vals)
onStop(turn) do (turn: Turn):
PQfinish(conn)
if status == CONNECTION_OK:
let ds = turn.newDataspace()
discard publish(turn, ds, initRecord("status", toSymbol($status), msg.toPreserves))
during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
var text = renderSql statement
if text == "":
discard publish(turn, ds, SqlError(msg: "invalid statement", context: $statement))
else:
var
res = PQexec(conn, text)
st = PQresultStatus(res)
if st == PGRES_TUPLES_OK or st == PGRES_SINGLE_TUPLE:
let tuples = PQntuples(res)
let fields = PQnfields(res)
if tuples > 0 and fields > 0:
for r in 0..<tuples:
var tupl = initSequence(fields)
for f in 0..<fields:
tupl[f] = toPreserves($PQgetvalue(res, r, f))
discard publish(turn, target, tupl)
else:
discard publish(turn, ds, SqlError(
msg: $PQresStatus(st),
context: $PQresultErrorMessage(res),
))
PQclear(res)
discard publish(turn, observer,
ResolvedAccepted(responderSession: ds))
else:
discard publish(turn, observer,
Rejected(detail: msg.toPreserves))
proc spawnPostgreActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawn("postgre", turn) do (turn: var Turn):
during(turn, root, ?:PostgreArguments) do (params: StringPairs, ds: Cap):
var
conn: PGconn
statusHandle: Handle
(keys, vals) = splitParams(params)
conn = PQconnectdbParams(keys, vals, 0)
checkPointer(conn)
let
status = PQstatus(conn)
msg = $PQerrorMessage(conn)
statusHandle = publish(turn, ds,
initRecord("status", toSymbol($status), msg.toPreserves))
if status == CONNECTION_OK:
during(turn, ds, ?:Query) do (statement: string, target: Cap):
var res = PQexec(conn, statement)
var st = PQresultStatus(res)
discard publish(turn, ds, toRecord(
"error", statement, toSymbol($PQresStatus(st)), $PQresultErrorMessage(res)))
if st == PGRES_TUPLES_OK or st == PGRES_SINGLE_TUPLE:
let tuples = PQntuples(res)
let fields = PQnfields(res)
if tuples > 0 and fields > 0:
for r in 0..<tuples:
var tupl = initSequence(fields)
for f in 0..<fields:
tupl[f] = toPreserves($PQgetvalue(res, r, f))
discard publish(turn, target, tupl)
PQclear(res)
else:
stderr.writeLine "refusing to do anything when status is ", status
do:
deallocCStringArray(keys)
deallocCStringArray(vals)
PQfinish(conn)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnPostgreActor(turn, relay)
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnPostgreActor(turn, root)

View File

@ -1,143 +1,106 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import
std/[options, random, tables, times],
pkg/preserves, preserves/sugar,
pkg/syndicate,
pkg/syndicate/protocols/gatekeeper,
pkg/syndicate/drivers/timers
import std/[options, tables, times]
import preserves, syndicate,
syndicate/relays,
syndicate/actors/timers
import ../schema/[assertions, config]
type
ProxyEntity {.final.} = ref object of Entity
## An entity that asserts and retracts observers on a pulse.
self, target: Cap
hold: Table[Handle, Forward]
isActive: bool
Forward = tuple
ass: Value
hand: Handle
type PulseEntity {.final.} = ref object of Entity
## An entity that asserts and retracts observers on a pulse.
self, timers: Cap
target: Entity
period: float
timerHandle: Handle
observers: Table[Handle, AssertionRef]
observePattern: Pattern
observing: bool
proc flipOn(proxy: ProxyEntity; turn: Turn) =
assert proxy.isActive == false
proxy.isActive = true
for fwd in proxy.hold.mvalues:
assert fwd.hand == 0.Handle
fwd.hand = publish(turn, proxy.target, fwd.ass)
proc flipOff(proxy: ProxyEntity; turn: Turn) =
if proxy.isActive:
proxy.isActive = false
for fwd in proxy.hold.mvalues:
assert fwd.hand != 0.Handle
retract(turn, fwd.hand)
fwd.hand = 0.Handle
method publish(proxy: ProxyEntity; turn: Turn; ass: AssertionRef; h: Handle) =
var fwd: Forward
fwd.ass = ass.value
if proxy.isActive:
fwd.hand = publish(turn, proxy.target, fwd.ass)
proxy.hold[h] = fwd
method retract(proxy: ProxyEntity; turn: Turn; h: Handle) =
var fwd: Forward
if proxy.hold.pop(h, fwd):
if fwd.hand > 0:
retract(turn, fwd.hand)
method message(proxy: ProxyEntity; turn: Turn; v: AssertionRef) =
## Messages passthru.
message(turn, proxy.target, v.value)
method sync(proxy: ProxyEntity; turn: Turn; peer: Cap) =
## Sync passthru.
sync(turn, proxy.target, peer)
type
PulseEntity {.final.} = ref object of Entity
self, driver: Cap
proxy: ProxyEntity
detail: PulseDetail
timerHandle: Handle
proc scheduleFlipOn(pulse: PulseEntity; turn: Turn) =
var period: float
while period <= 0.0:
period = gauss(mu = pulse.detail.interval, sigma = pulse.detail.dither)
replace(turn, pulse.driver, pulse.timerHandle, SetTimer(
label: true.toPreserves,
seconds: period,
kind: TimerKind.relative,
peer: pulse.self.embed,
proc schedule(turn: var Turn; pulse: PulseEntity) =
## Schedule the next pulse.
## The next pulse will be scheduled using the current time as
## reference point and not the moment of the previous pulse.
let then = getTime().toUnixFloat()+pulse.period
pulse.timerHandle = publish(turn, pulse.timers, Observe(
pattern: LaterThan ?: { 0: ?then },
observer: pulse.self,
))
proc scheduleFlipOff(pulse: PulseEntity; turn: Turn) =
replace(turn, pulse.driver, pulse.timerHandle, SetTimer(
label: false.toPreserves,
seconds: pulse.detail.period,
kind: TimerKind.relative,
peer: pulse.self.embed,
))
method publish(pulse: PulseEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
## Publish observers in response to <later-than …> assertions.
pulse.timers.target.retract(turn, pulse.timerHandle)
schedule(turn, pulse)
pulse.observing = true
for h, a in pulse.observers.pairs:
pulse.target.publish(turn, a, h)
pulse.target.sync(turn, pulse.self)
method message(pulse: PulseEntity; turn: Turn; v: AssertionRef) =
var exp: TimerExpired
if exp.fromPreserves(v.value):
if exp.label.isFalse:
pulse.scheduleFlipOn(turn)
pulse.proxy.flipOff(turn)
else:
pulse.scheduleFlipOff(turn)
pulse.proxy.flipOn(turn)
method message(pulse: PulseEntity; turn: var Turn; v: AssertionRef) =
## Retract observers in response to a sync message.
pulse.observing = false
for h in pulse.observers.keys:
pulse.target.retract(turn, h)
proc stop(pulse: PulseEntity, turn: Turn) =
if pulse.proxy.isActive:
pulse.proxy.flipOff(turn)
retract(turn, pulse.timerHandle)
# TODO: is this automatic?
type ProxyEntity {.final.} = ref object of Entity
## A proxy `Entity` that diverts observers to a `PulseEntity`.
pulse: PulseEntity
proc newPulseEntity(turn: Turn; detail: PulseDetail; timerDriver: Cap): PulseEntity =
if not (detail.target of Cap):
raise newException(ValueError, "pulse target is not an embedded Cap")
result = PulseEntity(
facet: turn.facet,
driver: timerDriver,
detail: detail,
proxy: ProxyEntity(
facet: turn.facet,
target: detail.target.Cap,
)
method publish(proxy: ProxyEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
## Proxy assertions that are not observations.
if proxy.pulse.observePattern.matches ass.value:
if proxy.pulse.observers.len == 0:
schedule(turn, proxy.pulse)
proxy.pulse.observers[h] = ass
else:
proxy.pulse.target.publish(turn, ass, h)
method retract(proxy: ProxyEntity; turn: var Turn; h: Handle) =
## Retract proxied assertions.
var obs: AssertionRef
if proxy.pulse.observers.pop(h, obs):
if proxy.pulse.observing:
proxy.pulse.target.retract(turn, h)
if proxy.pulse.observers.len == 0:
proxy.pulse.timers.target.retract(turn, proxy.pulse.timerHandle)
else:
proxy.pulse.target.retract(turn, h)
method message(proxy: ProxyEntity; turn: var Turn; v: AssertionRef) =
## Proxy messages.
proxy.pulse.target.message(turn, v)
method sync(proxy: ProxyEntity; turn: var Turn; peer: Cap) =
## Proxy sync.
proxy.pulse.target.sync(turn, peer)
proc newProxyEntity(turn: var Turn; timers, ds: Cap; period: float): ProxyEntity =
new result
result.pulse = PulseEntity(
target: ds.target,
timers: timers,
observePattern: ?:Observe,
period: period,
)
result.proxy.self = newCap(turn, result.proxy)
result.self = newCap(turn, result)
result.pulse.self = newCap(turn, result.pulse)
proc spawnPulseActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
spawnActor(turn, "pulse") do (turn: Turn):
let timerDriver = turn.newDataspace()
spawnTimerDriver(turn, timerDriver)
let resolvePat = Resolve?:{ 0: PulseStep.grabWithin, 1: grab() }
during(turn, relay, resolvePat) do (detail: PulseDetail; observer: Cap):
var pulse: PulseEntity
if detail.period < 0.000_0001 or
detail.interval < detail.period or
detail.interval < detail.dither:
var r = Resolved(orKind: ResolvedKind.Rejected)
r.rejected.detail = "invalid pulse parameters".toPreserves
discard publish(turn, observer, r)
else:
randomize()
pulse = turn.newPulseEntity(detail, timerDriver)
discard publish(turn, observer, ResolvedAccepted(
responderSession: pulse.proxy.self))
pulse.scheduleFlipOn(turn)
do:
if not pulse.isNil:
pulse.stop(turn)
proc spawnPulseActor*(turn: var Turn; root: Cap): Actor =
## Spawn an actor that retracts and re-asserts observers on
## a timed pulse. Requires a timer service on the `root` capability.
spawn("pulse", turn) do (turn: var Turn):
let grabPeriod = ?Observe(pattern: !Pulse) ?? { 0: grab() }
during(turn, root, ?:PulseArguments) do (ds: Cap):
during(turn, ds, grabPeriod) do (lit: Literal[float]):
if lit.value < 0.000_1:
stderr.writeLine("pulse period is too small: ", lit.value, "s")
else:
let proxyCap = newCap(turn, newProxyEntity(turn, root, ds, lit.value))
var pulse = Pulse(periodSec: lit.value, proxy: embed proxyCap)
discard publish(turn, ds, pulse)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: Turn):
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
spawnPulseActor(turn, relay)
runActor("main") do (turn: var Turn; root: Cap):
spawnTimers(turn, root)
connectStdio(turn, root)
discard spawnPulseActor(turn, root)
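
From a client's perspective: assert `PulseArguments`, observe a `<pulse …>` record with the desired period as a literal, then publish observations at the returned proxy; a rough sketch (the `?:`-literal pattern form and the generated `field0` nesting are assumptions):

```
# Hypothetical poller: re-assert an observation every 10 seconds.
import preserves, syndicate, syndicate/relays
import ../schema/[assertions, config]

runActor("poller") do (turn: var Turn; root: Cap):
  connectStdio(turn, root)
  let ds = newDataspace(turn)
  discard publish(turn, root, PulseArguments(
    field0: PulseArgumentsField0(dataspace: ds)))
  # The literal period is what the actor's grabPeriod pattern matches on.
  during(turn, ds, Pulse ?: { 0: ?10.0, 1: grab() }) do (proxy: Cap):
    # Anything observed via the proxy is retracted and re-asserted each pulse.
    onPublish(turn, proxy, grab()) do (v: Value):
      stderr.writeLine "pulsed: ", v
```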

View File

@ -0,0 +1,113 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import preserves, syndicate, syndicate/relays
import ../schema/[config, sql]
# Avoid Sqlite3 from the standard library because it is
# only held together by wishful thinking and dlload.
{.passC: staticExec("pkg-config --cflags sqlite3").}
{.passL: staticExec("pkg-config --libs sqlite3").}
{.pragma: sqlite3h, header: "sqlite3.h".}
var
SQLITE_VERSION_NUMBER {.importc, sqlite3h.}: cint
SQLITE_OK {.importc, sqlite3h.}: cint
SQLITE_ROW {.importc, sqlite3h.}: cint
SQLITE_DONE {.importc, sqlite3h.}: cint
SQLITE_OPEN_READONLY {.importc, sqlite3h.}: cint
const
SQLITE_INTEGER = 1
SQLITE_FLOAT = 2
SQLITE_TEXT = 3
SQLITE_BLOB = 4
# SQLITE_NULL = 5
type
Sqlite3 {.importc: "sqlite3", sqlite3h.} = distinct pointer
Stmt {.importc: "sqlite3_stmt", sqlite3h.} = distinct pointer
{.pragma: importSqlite3, importc: "sqlite3_$1", sqlite3h.}
proc libversion_number: cint {.importSqlite3.}
proc open_v2(filename: cstring; ppDb: ptr Sqlite3; flags: cint; zVfs: cstring): cint {.importSqlite3.}
proc close(ds: Sqlite3): int32 {.discardable, importSqlite3.}
proc errmsg(db: Sqlite3): cstring {.importSqlite3.}
proc prepare_v2(db: Sqlite3; zSql: cstring, nByte: cint; ppStmt: ptr Stmt; pzTail: ptr cstring): cint {.importSqlite3.}
proc step(para1: Stmt): cint {.importSqlite3.}
proc column_count(stmt: Stmt): int32 {.importSqlite3.}
proc column_blob(stmt: Stmt; col: cint): pointer {.importSqlite3.}
proc column_bytes(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc column_double(stmt: Stmt; col: cint): float64 {.importSqlite3.}
proc column_int64(stmt: Stmt; col: cint): int64 {.importSqlite3.}
proc column_text(stmt: Stmt; col: cint): cstring {.importSqlite3.}
proc column_type(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc finalize(stmt: Stmt): cint {.importSqlite3.}
doAssert libversion_number() == SQLITE_VERSION_NUMBER
proc logError(db: Sqlite3; context: string) =
writeLine(stderr, errmsg(db), ": ", context)
proc extractValue(stmt: Stmt; col: cint): Value =
case column_type(stmt, col)
of SQLITE_INTEGER:
result = toPreserves(column_int64(stmt, col))
of SQLITE_FLOAT:
result = toPreserves(column_double(stmt, col))
of SQLITE_TEXT:
result = Value(kind: pkString, string: newString(column_bytes(stmt, col)))
if result.string.len > 0:
copyMem(addr result.string[0], column_text(stmt, col), result.string.len)
of SQLITE_BLOB:
result = Value(kind: pkByteString, bytes: newSeq[byte](column_bytes(stmt, col)))
if result.bytes.len > 0:
copyMem(addr result.bytes[0], column_blob(stmt, col), result.bytes.len)
else:
result = initRecord("null")
proc extractTuple(stmt: Stmt; arity: cint): Value =
result = initSequence(arity)
for col in 0..<arity: result[col] = extractValue(stmt, col)
proc spawnSqliteActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawn("sqlite-actor", turn) do (turn: var Turn):
during(turn, root, ?:SqliteArguments) do (path: string, ds: Cap):
stderr.writeLine("opening SQLite database ", path)
var db: Sqlite3
if open_v2(path, addr db, SQLITE_OPEN_READONLY, nil) != SQLITE_OK:
logError(db, path)
else:
during(turn, ds, ?:Query) do (statement: string, target: Cap):
var stmt: Stmt
if prepare_v2(db, statement, statement.len.cint, addr stmt, nil) != SQLITE_OK:
logError(db, statement)
else:
try:
let arity = column_count(stmt)
var res = step(stmt)
while res == SQLITE_ROW:
var rec = extractTuple(stmt, arity)
discard publish(turn, target, rec)
res = step(stmt)
assert res != 100 # 100 is SQLITE_ROW; the loop above runs until step returns something else
if res != SQLITE_DONE:
logError(db, statement)
finally:
if finalize(stmt) != SQLITE_OK: logError(db, statement)
do:
close(db)
stderr.writeLine("closed SQLite database ", path)
when isMainModule:
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnSqliteActor(turn, root)
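
A rough client sketch for this actor (the database path and table are placeholders; the generated `field0` nesting and the whole-value `grab()` pattern are assumptions):

```
# Hypothetical client: open a database and stream the rows of a query.
import preserves, syndicate, syndicate/relays
import ../schema/[config, sql]

runActor("sqlite_client") do (turn: var Turn; root: Cap):
  connectStdio(turn, root)
  let
    ds = newDataspace(turn)    # the actor serves queries here
    rows = newDataspace(turn)  # result tuples are published here
  discard publish(turn, root, SqliteArguments(
    field0: SqliteArgumentsField0(database: "/tmp/example.db", dataspace: ds)))
  discard publish(turn, ds, Query(
    statement: "SELECT id, name FROM users", target: embed rows))
  onPublish(turn, rows, grab()) do (row: Value):
    stderr.writeLine "row: ", row
```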

105
src/syndesizer/webhooks.nim Normal file
View File

@ -0,0 +1,105 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

## An actor for relaying Webhooks.

import std/[asyncdispatch, asynchttpserver, net, strutils, tables, uri]
import preserves, preserves/jsonhooks
import syndicate, syndicate/[bags, relays]
import syndicate/protocols/http
import ../schema/config

type
  CapBag = Bag[Cap]
  Endpoints = Table[seq[string], Cap]

func splitPath(s: string): seq[string] = s.strip(chars = {'/'}).split('/')

proc toRecord(req: Request; seqnum: BiggestInt; path: seq[string]): Value =
  ## Convert a request value from the std/asynchttpserver module
  ## to a request type from syndicate/protocols/http.
  var record: HttpRequest
  record.sequenceNumber = seqnum
  record.host = req.hostname
  record.`method` = Symbol($req.reqMethod)
  record.path = path
  for key, val in req.headers.pairs:
    record.headers[Symbol key] = val
  for key, val in decodeQuery(req.url.query):
    record.query[Symbol key] =
      @[QueryValue(orKind: QueryValueKind.string, string: val)]
  let contentType = req.headers.getOrDefault("content-type")
  result = toPreserves record
  if req.body.len > 0:
    # Field 7 of the request record is the body,
    # decoded according to the Content-Type header.
    result[7] =
      case contentType.toString
      of "application/json":
        req.body.parsePreserves
      of "application/octet-stream":
        cast[seq[byte]](req.body).toPreserves
      else:
        req.body.toPreserves

proc spawnWebhookActor*(turn: var Turn; root: Cap): Actor =
  spawn("webhooks", turn) do (turn: var Turn):
    # Grab the details on listening for requests.
    # Disregard endpoints so the server doesn't restart as those change.
    let pat = grabRecord("webhooks", grabDictionary({ "listen": ?:config.Tcp }))
    during(turn, root, pat) do (host: string; port: Port):
      # construct a pattern for grabbing endpoints when the server is ready
      let endpointsPat = grabRecord("webhooks", grabDictionary({
          "listen": ?config.Tcp(host: host, port: BiggestInt port),
          "endpoints": grab(),
        }))
      var seqNum: BiggestInt
      let facet = turn.facet
      # use a bag so the same capability registered multiple
      # times with the same path does not get duplicate messages
      let endpoints = newTable[seq[string], CapBag]()
      proc cb(req: Request): Future[void] =
        inc(seqNum)
        let path = req.url.path.splitPath
        if not endpoints.hasKey path:
          result = respond(req, Http404,
              "no capabilities registered at $1\n" % [req.url.path])
        else:
          result = respond(req, Http200, "")
          proc act(turn: var Turn) {.gcsafe.} =
            let rec = req.toRecord(seqNum, path)
            for cap in endpoints[path]:
              message(turn, cap, rec)
          run(facet, act)
      let server = newAsyncHttpServer()
      stderr.writeLine("listening for webhooks at ", host, ":", port)
      if host.isIpAddress:
        var ip = parseIpAddress host
        case ip.family
        of IPv6:
          asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
        of IPv4:
          asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))
      else:
        # a hostname rather than an address: serve both families
        asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
        asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))
      during(turn, root, endpointsPat) do (eps: Endpoints):
        for path, cap in eps:
          if not endpoints.hasKey path:
            endpoints[path] = CapBag()
          discard endpoints[path].change(cap, +1)
      do:
        for path, cap in eps:
          discard endpoints[path].change(cap, -1)
    do:
      stderr.writeLine("closing webhook server at ", host, ":", port)
      close(server)

when isMainModule:
  runActor("webhooks") do (turn: var Turn; root: Cap):
    connectStdio(turn, root)
    discard spawnWebhookActor(turn, root)
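For reference, the assertion shape the two `grabRecord` patterns above expect looks roughly like this (host, port, path, and target capability are illustrative):

```
# Hypothetical webhook binding; <tcp … …> is the config.Tcp record
<webhooks {
  listen: <tcp "0.0.0.0" 1048>
  endpoints: {
    ["api" "v1" "push"]: $target
  }
}>
```

Requests to `/api/v1/push` would then be relayed to `$target` as HTTP request record messages; registering the same capability twice at one path still delivers each request once.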


@ -0,0 +1,55 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[asyncdispatch, json]
import preserves
import syndicate, syndicate/relays
import ws

import ../schema/config, ../json_messages

type WebSocket = ws.WebSocket
  # not the object from the transportAddress schema

proc spawnWebsocketActor*(turn: var Turn; root: Cap): Actor =
  spawn("websocket-actor", turn) do (turn: var Turn):
    during(turn, root, ?:WebsocketArguments) do (ds: Cap, url: string):
      let facet = turn.facet
      var
        ws: WebSocket
        connectedHandle: Handle
      newWebSocket(url).addCallback(turn) do (turn: var Turn; sock: WebSocket):
        ws = sock
        connectedHandle = publish(turn, ds, initRecord("connected", url.toPreserves))
        var fut: Future[(Opcode, string)]
        proc recvMessage() {.gcsafe.} =
          # Take the next packet and re-arm this callback,
          # looping until the connection closes.
          fut = receivePacket ws
          addCallback(fut, facet) do (turn: var Turn):
            let (opcode, data) = read fut
            case opcode
            of Text:
              message(turn, ds,
                RecvJson(data: data.parseJson))
            of Binary:
              message(turn, ds,
                initRecord("recv", cast[seq[byte]](data).toPreserves))
            of Ping:
              asyncCheck(turn, ws.send(data, Pong))
            of Pong, Cont:
              discard
            of Close:
              retract(turn, connectedHandle)
              stderr.writeLine "closed connection with ", url
              stop(turn)
              return
            recvMessage()
        recvMessage()
        onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
          asyncCheck(turn, ws.send($data, Text))
    do:
      close(ws)

when isMainModule:
  runActor("main") do (turn: var Turn; root: Cap):
    connectStdio(turn, root)
    discard spawnWebsocketActor(turn, root)
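A configuration sketch, assuming `WebsocketArguments` carries the `dataspace` and `url` fields destructured above (the URL is illustrative). Note that `<connected …>` is only asserted once the socket is open:

```
# Hypothetical configuration for the websocket actor
let ?wsspace = dataspace
$wsspace ? <connected "ws://127.0.0.1:5225/"> [
  # safe to send once connected; replies arrive as <recv …> messages
  ! <send { "command": ["ping"] }>
]

? <service-object <daemon syndesizer> ?cap> [
  $cap <websocket { dataspace: $wsspace url: "ws://127.0.0.1:5225/" }>
]
```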


@ -2,7 +2,7 @@
 # SPDX-License-Identifier: Unlicense

 import std/[options, parsexml, xmlparser, xmltree]
-import preserves, preserves/sugar, preserves/xmlhooks
+import preserves, preserves/xmlhooks
 import syndicate

 import ../schema/[assertions, config]

@ -17,18 +17,17 @@ proc translatePreserves(pr: Value): XmlTranslation {.gcsafe.} =
   var xn = result.pr.preservesTo(XmlNode)
   if xn.isSome: result.xml = $get(xn)

-proc spawnXmlTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
-  spawnActor(turn, "xml-translator") do (turn: Turn):
+proc spawnXmlTranslator*(turn: var Turn; root: Cap): Actor {.discardable.} =
+  spawn("xml-translator", turn) do (turn: var Turn):
     during(turn, root, ?:XmlTranslatorArguments) do (ds: Cap):
-      let xmlPat = observePattern(!XmlTranslation, {@[%0]:grab()})
-      during(turn, ds, xmlPat) do (xs: Literal[string]):
+      let obsPat = ?Observe(pattern: !XmlTranslation)
+      during(turn, ds, obsPat ?? {0: grab()}) do (xs: Literal[string]):
         publish(turn, ds, translateXml(xs.value))
-      let prPat = observePattern(!XmlTranslation, {@[%1]:grab()})
-      during(turn, ds, prPat) do (pr: Literal[Value]):
+      during(turn, ds, obsPat ?? {1: grab()}) do (pr: Literal[Value]):
         publish(turn, ds, translatePreserves(pr.value))

 when isMainModule:
   import syndicate/relays
-  runActor("main") do (turn: Turn):
-    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
-      spawnXmlTranslator(turn, ds)
+  runActor("main") do (turn: var Turn; root: Cap):
+    connectStdio(turn, root)
+    spawnXmlTranslator(turn, root)
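The two `during` clauses react to observers of either field of an `XmlTranslation` assertion: field 0 holds the XML text, field 1 the Preserves value. A usage sketch (the `xml-translation` record label is assumed from the `assertions` schema, which is not shown in this diff):

```
# Hypothetical: observe the Preserves translation of an XML literal
$ds ? <xml-translation "<greeting>hi</greeting>" ?pr> [
  # ?pr is bound to the value published by the actor
]
```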


@ -2,11 +2,11 @@
 # SPDX-License-Identifier: Unlicense

 import std/[os, strutils]
-import preserves, preserves/sugar, syndicate
-import ./schema/[assertions, config]
+import preserves, syndicate
+import ../schema/[assertions, config]

-{.passC: staticExec"$PKG_CONFIG --cflags libxslt".}
-{.passL: staticExec"$PKG_CONFIG --libs libxslt".}
+{.passC: staticExec("pkg-config --cflags libxslt").}
+{.passL: staticExec("pkg-config --libs libxslt").}

 {.pragma: libxslt, header: "libxslt/xslt.h", importc.}

@ -173,11 +173,11 @@ proc toPreservesHook*(xn: xmlNodePtr): Value =
   preserveSiblings(items, xn)
   items[0]

-proc spawnXsltActor*(turn: Turn; root: Cap): Actor {.discardable.} =
-  spawnActor(turn, "xslt") do (turn: Turn):
+proc spawnXsltActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
+  spawn("xslt", turn) do (turn: var Turn):
     initLibXml()
     during(turn, root, ?:XsltArguments) do (ds: Cap):
-      let sheetsPat = observePattern(!XsltTransform, {@[%0]: grab(), @[%1]: grab()})
+      let sheetsPat = ?Observe(pattern: !XsltTransform) ?? {0: grab(), 1: grab()}
       during(turn, ds, sheetsPat) do (stylesheet: Literal[string], input: Literal[string]):
         let cur = loadStylesheet(stylesheet.value)
         if cur.isNil:

@ -206,6 +206,6 @@ proc spawnXsltActor*(turn: Turn; root: Cap): Actor {.discardable.} =
 when isMainModule:
   import syndicate/relays
-  runActor("main") do (turn: Turn):
-    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
-      spawnXsltActor(turn, ds)
+  runActor("main") do (turn: var Turn; root: Cap):
+    connectStdio(turn, root)
+    spawnXsltActor(turn, root)
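As with the XML translator, a transformation is requested by observing an `XsltTransform` assertion whose first two fields are the stylesheet location and the input document. A sketch (the `xslt-transform` record label and stylesheet path are assumed for illustration):

```
# Hypothetical: apply a stylesheet and grab the result
$ds ? <xslt-transform "/srv/sheets/page.xsl" "<doc/>" ?output> [
  # the actor loads the stylesheet, applies it to the input,
  # and asserts the result as the final field
]
```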

src/syndex_card.nim Normal file

@ -0,0 +1,133 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

## This was all Tony's idea, except for the silly name.

import std/[asyncdispatch, os, terminal]
import preserves
import syndicate, syndicate/[durings, relays]
import illwill

proc exitProc() {.noconv.} =
  illwillDeinit()
  showCursor()
  quit QuitSuccess
setControlCHook(exitProc)

proc parsePattern(pr: Value): Pattern =
  # Translate the sigils of a Preserves value into a dataspace
  # pattern: `_` drops a field and `?` captures one.
  let
    dropSigil = initRecord("lit", "_".toSymbol)
    grabSigil = initRecord("lit", "?".toSymbol)
  var pr = grab(pr).toPreserves
  apply(pr) do (pr: var Value):
    if pr == dropSigil:
      pr = initRecord("_")
    elif pr == grabSigil:
      pr = initRecord("bind", initRecord("_"))
  doAssert result.fromPreserves(pr)

proc inputPattern: Pattern =
  var args = commandLineParams()
  if args.len != 1:
    quit "expected a single pattern argument"
  else:
    var input = pop args
    if input == "":
      quit "expected Preserves Pattern on stdin"
    else:
      var pr: Value
      # Accept the argument as binary or textual Preserves.
      try: pr = decodePreserves(input)
      except ValueError: discard
      try: pr = parsePreserves(input)
      except ValueError: discard
      if pr.isFalse:
        quit "failed to parse Preserves argument"
      result = parsePattern(pr)

type TermEntity {.final.} = ref object of Entity
  pattern: Pattern
  value: Value

method publish(te: TermEntity; turn: var Turn; v: AssertionRef; h: Handle) =
  te.value = v.value
  var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
  var y = 1
  termBuf.write(1, y, $te.pattern, styleBright)
  inc(y)
  termBuf.drawHorizLine(0, termBuf.width(), y)
  inc(y)
  termBuf.write(0, y, $h, styleBright)
  for i, e in te.value.sequence:
    inc(y)
    termBuf.write(1, y, $e)
  termBuf.display()

method retract(te: TermEntity; turn: var Turn; h: Handle) =
  var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
  var y = 1
  termBuf.write(1, y, $te.pattern, styleDim)
  inc y
  termBuf.drawHorizLine(0, termBuf.width(), y, true)
  inc(y)
  termBuf.write(0, y, $h, styleBright)
  if te.value.isSequence:
    for i, e in te.value.sequence:
      inc(y)
      termBuf.write(1, y, $e)
  else:
    inc(y)
    termBuf.write(1, y, $te.value)
  termBuf.display()

type DumpEntity {.final.} = ref object of Entity
  discard

method publish(dump: DumpEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
  stdout.writeLine($ass.value)
  stdout.flushFile()

method message*(dump: DumpEntity; turn: var Turn; ass: AssertionRef) =
  stdout.writeLine($ass.value)
  stdout.flushFile()

proc exit {.noconv.} =
  illwillDeinit()
  showCursor()
  quit()
setControlCHook(exit)

proc main =
  let
    route = envRoute()
    pat = inputPattern()
  if stdout.is_a_TTY:
    # Interactive: render matches with illwill.
    illwillInit()
    hideCursor()
    discard bootDataspace("syndex_card") do (turn: var Turn; root: Cap):
      resolve(turn, root, route) do (turn: var Turn; ds: Cap):
        var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
        termBuf.write(1, 1, $pat, styleBright)
        termBuf.drawHorizLine(1, termBuf.width(), 2)
        termBuf.display()
        discard observe(turn, ds, pat, TermEntity(pattern: pat))
    while true:
      try: poll()
      except CatchableError:
        illwillDeinit()
        showCursor()
        quit getCurrentExceptionMsg()
  else:
    # Not a terminal: dump matches line by line.
    let entity = DumpEntity()
    runActor("syndex_card") do (root: Cap; turn: var Turn):
      spawnRelays(turn, root)
      resolve(turn, root, route) do (turn: var Turn; ds: Cap):
        discard observe(turn, ds, pat, entity)

main()
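Given `parsePattern` above, the single command-line argument is a Preserves value in which the symbol `_` matches and drops a field while `?` captures one, for example (the record label is hypothetical):

```
# capture the final field of three-field <milestone …> records
<milestone _ _ ?>
```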


@ -1,64 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[os, tables]
import preserves, syndicate, syndicate/[durings, relays]

proc parsePattern(pr: Value): Pattern =
  let
    dropSigil = initRecord("lit", "_".toSymbol)
    grabSigil = initRecord("lit", "?".toSymbol)
  var pr = drop(pr).toPreserves
  apply(pr) do (pr: var Value):
    if pr == dropSigil:
      pr = initRecord("_")
    elif pr == grabSigil:
      pr = initRecord("bind", initRecord("_"))
  doAssert result.fromPreserves(pr)

proc inputPatterns: seq[Pattern] =
  var args = commandLineParams()
  result.setLen(args.len)
  for i, input in args:
    try: result[i] = input.parsePreserves.parsePattern
    except ValueError:
      quit "failed to parse Preserves argument"

type DumpEntity {.final.} = ref object of Entity
  assertions: Table[Handle, seq[Value]]

proc toLine(values: seq[Value]; prefix: char): string =
  result = newStringOfCap(1024)
  let sep = getEnv("FS", " ")
  result.add(prefix)
  for v in values:
    add(result, sep)
    add(result, $v)
  add(result, '\n')

method publish(dump: DumpEntity; turn: Turn; ass: AssertionRef; h: Handle) =
  var values = ass.value.sequence
  stdout.write(values.toLine('+'))
  stdout.flushFile()
  dump.assertions[h] = values

method retract(dump: DumpEntity; turn: Turn; h: Handle) =
  var values: seq[Value]
  if dump.assertions.pop(h, values):
    stdout.write(values.toLine('-'))
    stdout.flushFile()

method message*(dump: DumpEntity; turn: Turn; ass: AssertionRef) =
  stdout.write(ass.value.sequence.toLine('!'))
  stdout.flushFile()

proc main =
  let
    patterns = inputPatterns()
    entity = DumpEntity()
  runActor("syndex_card") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; peer: Cap):
      for pat in patterns:
        discard observe(turn, peer, pat, entity)

main()
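This removed tool printed one line per event: a `+`, `-`, or `!` prefix for publish, retract, or message, followed by each captured value separated by `$FS` (default is a single space). Illustrative output for a hypothetical two-value capture:

```
+ "alpha" 42
- "alpha" 42
! "ping"
```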


@ -1,61 +1,13 @@
# Emulate Nimble from CycloneDX data at sbom.json.
# Package
import std/json
version = "20240319"
author = "Emery Hemingway"
description = "Utilites for Syndicated Actors and Synit"
license = "unlicense"
srcDir = "src"
bin = @["mintsturdyref", "mount_actor", "msg", "net_mapper", "preserve_process_environment", "syndesizer", "syndex_card"]
proc lookupComponent(sbom: JsonNode; bomRef: string): JsonNode =
for c in sbom{"components"}.getElems.items:
if c{"bom-ref"}.getStr == bomRef:
return c
result = newJNull()
let
sbom = (getPkgDir() & "/sbom.json").readFile.parseJson
comp = sbom{"metadata", "component"}
bomRef = comp{"bom-ref"}.getStr
# Dependencies
version = comp{"version"}.getStr
author = comp{"authors"}[0]{"name"}.getStr
description = comp{"description"}.getStr
license = comp{"licenses"}[0]{"license", "id"}.getStr
for prop in comp{"properties"}.getElems.items:
let (key, val) = (prop{"name"}.getStr, prop{"value"}.getStr)
case key
of "nim:skipDirs:":
add(skipDirs, val)
of "nim:skipFiles:":
add(skipFiles, val)
of "nim:skipExt":
add(skipExt, val)
of "nim:installDirs":
add(installDirs, val)
of "nim:installFiles":
add(installFiles, val)
of "nim:installExt":
add(installExt, val)
of "nim:binDir":
add(binDir, val)
of "nim:srcDir":
add(srcDir, val)
of "nim:backend":
add(backend, val)
else:
if key.startsWith "nim:bin:":
namedBin[key[8..key.high]] = val
for depend in sbom{"dependencies"}.items:
if depend{"ref"}.getStr == bomRef:
for depRef in depend{"dependsOn"}.items:
let dep = sbom.lookupComponent(depRef.getStr)
var spec = dep{"name"}.getStr
for extRef in dep{"externalReferences"}.elems:
if extRef{"type"}.getStr == "vcs":
spec = extRef{"url"}.getStr
break
let ver = dep{"version"}.getStr
if ver != "":
if ver.allCharsInSet {'0'..'9', '.'}: spec.add " == "
else: spec.add '#'
spec.add ver
requires spec
break
requires "syndicate#b209548f5d15f7391c08fcaec3615ed843f8a410", "https://git.sr.ht/~ehmry/nim_taps#6f1252d0d17cd56fd707b831c893758ddca08755"