Compare commits

...

8 Commits

33 changed files with 713 additions and 910 deletions

README.md
View File

@ -75,7 +75,7 @@ let ?mpvSpace = dataspace
? <service-object <daemon syndesizer> ?cap> [
$cap <json-socket-translator {
dataspace: $mpvSpace
socket: "/run/user/1000/mpv.sock"
socket: <unix "/run/user/1000/mpv.sock">
}>
]
]
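With this change the `socket` field of `json-socket-translator` takes a `SocketAddress` rather than a bare path, so either a `<unix …>` or a `<tcp …>` address can be supplied (see `SocketAddress` in the protocol schema further down in this diff). A hypothetical TCP variant is sketched below; the host, port, and `$ds` dataspace are illustrative placeholders, not taken from the repository:
```
$cap <json-socket-translator {
  dataspace: $ds
  socket: <tcp "127.0.0.1" 4321>
}>
```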
@ -123,38 +123,6 @@ let ?ds = dataspace
]
```
### PostgreSQL
Readonly access to PostgreSQL databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.
Can be disabled by passing `--define:withPostgre=no` to the Nim compiler.
```
# Configuration example
<require-service <daemon syndesizer>>
let ?sqlspace = dataspace
? <service-object <daemon syndesizer> ?cap> [
$cap <postgre {
dataspace: $sqlspace
connection: [
["host" "example.com"]
["dbname" "foobar"]
["user" "hackme"]
]
}>
]
let ?tuplespace = dataspace
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
$tuplespace ? [?id ?name] [
$log ! <log "-" { row: <example-row $id $name> }>
]
```
### Pulse proxy
A proxy actor that passes assertions and messages to a configured capability but only asserts observations on a periodic pulse.
@ -204,51 +172,6 @@ $tuplespace ? [?id ?name] [
]
```
### Webhooks
Listens for webhook requests and sends request data to a dataspace as messages.
Request data is formatted according to the http schema [defined in syndicate-protocols](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/branch/main/schemas/http.prs), with the exception that message bodies may be **bytes**, **string**, or **any** for the `content-type`s of `application/octet-stream`, `text/*`, and `application/json`, respectively.
```
# Configuration example
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
$cap <webhooks {
listen: <tcp "0.0.0.0" 1048>
endpoints: {
# http://0.0.0.0:1048/my-endpoint
["my-endpoint"]: $target-dataspace
# http://0.0.0.0:1048/some/multi-element/path
["some", "multi-element", "path"]: $target-dataspace
}
}>
]
```
### Websockets
Connects to a websocket endpoint. During the lifetime of the connection a `<connected $URL>` assertion is made. Messages received from the server are sent to the dataspace wrapped in `<recv …>` records and messages observed as `<send …>` are sent to the server.
```
# Configuration example
<require-service <daemon syndesizer>>
let ?websocketspace = dataspace
? <service-object <daemon syndesizer> ?cap> [
$cap <websocket {
dataspace: $websocketspace
url: "ws://127.0.0.1:5225/"
}>
]
$websocketspace ? <connected $websocketUrl> [
<bind <ref { oid: "websocket" key: #x"" }> $websocketspace #f>
]
```
### XML translator
Translates between Preserves and XML according to the [Conventions for Common Data Types](https://preserves.dev/conventions.html).
@ -270,28 +193,6 @@ Examples:
]
```
### XSLT processor
Performs XML stylesheet transformations. For a given textual XSLT stylesheet and a textual XML document it generates an abstract XML document in Preserves form. Inputs may be XML text or paths to XML files.
```
# Configuration example
let ?ds = dataspace
$ds [
? <xslt-transform "/stylesheet.xls" "/doc.xml" ?output> [
? <xml-translation ?text $output> [
$log ! <log "-" { xslt-output: $text }>
]
]
]
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> $cap [
<xml-translator { dataspace: $ds }>
<xslt { dataspace: $ds }>
]
```
---
## mintsturdyref
@ -370,7 +271,89 @@ Example script:
]
```
## PostgreSQL
Readonly access to PostgreSQL databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.
Can be disabled by passing `--define:withPostgre=no` to the Nim compiler.
```
# Configuration example
<require-service <daemon postgre_actor>>
let ?sqlspace = dataspace
? <service-object <daemon postgre_actor> ?cap> [
$cap <postgre {
dataspace: $sqlspace
connection: [
["host" "example.com"]
["dbname" "foobar"]
["user" "hackme"]
]
}>
]
let ?tuplespace = dataspace
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
$tuplespace ? [?id ?name] [
$log ! <log "-" { row: <example-row $id $name> }>
]
```
## preserve_process_environment
This utility serializes its process environment to Preserves and prints it to stdout.
It can be used to feed the environment variables of a nested child of the Syndicate server back to the server, for example to retrieve the environment variables that a desktop manager passed on to its children.
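As a rough illustration, a single run might print a Preserves dictionary along the lines of the sketch below; the exact output shape (here a dictionary of string keys and values) and the variables shown are assumptions, not taken from this repository.
```
{
  "HOME": "/home/user"
  "PATH": "/run/current-system/sw/bin"
  "WAYLAND_DISPLAY": "wayland-1"
}
```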
## SQLite
Readonly access to SQLite databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.
Can be disabled by passing `--define:withSqlite=no` to the Nim compiler.
```
# Configuration example
<require-service <daemon sqlite_actor>>
let ?sqlspace = dataspace
? <service-object <daemon sqlite_actor> ?cap> [
$cap <sqlite {
dataspace: $sqlspace
database: "/var/db/example.db"
}>
]
let ?tuplespace = dataspace
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
$tuplespace ? [?id ?name] [
$log ! <log "-" { row: <example-row $id $name> }>
]
```
## XSLT processor
Performs XML stylesheet transformations. For a given textual XSLT stylesheet and a textual XML document it generates an abstract XML document in Preserves form. Inputs may be XML text or paths to XML files.
```
# Configuration example
let ?ds = dataspace
$ds [
? <xslt-transform "/stylesheet.xls" "/doc.xml" ?output> [
? <xml-translation ?text $output> [
$log ! <log "-" { xslt-output: $text }>
]
]
]
<require-service <daemon xslt_actor>>
? <service-object <daemon xslt_actor> ?cap> $cap [
<xml-translator { dataspace: $ds }>
<xslt { dataspace: $ds }>
]
```

View File

@ -1,2 +1,3 @@
include_rules
: lock.json |> !nim_cfg |> | ./<lock>
: |> !nim_lk |> {lockfile}
: {lockfile} |> !nim_cfg |> | ./<lock>

base64.prs Normal file
View File

@ -0,0 +1,4 @@
version 1.
Base64Text = <base64 @txt string @bin bytes> .
Base64File = <base64-file @txt string @path string @size int> .
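A hypothetical usage sketch for these records, in the style of the configuration examples in the README above: observing a `<base64 …>` record by its text field asks the decoder to assert the same record with the decoded bytes filled in. The service wiring via `<service-object <daemon syndesizer> …>` and the sample text are illustrative assumptions.
```
<require-service <daemon syndesizer>>
let ?ds = dataspace
? <service-object <daemon syndesizer> ?cap> [
  $cap <base64-decoder { dataspace: $ds }>
]
$ds ? <base64 "aGVsbG8=" ?bin> [
  $log ! <log "-" { decoded: $bin }>
]
```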

View File

@ -1,6 +1,10 @@
version 1 .
embeddedType EntityRef.Cap .
Base64DecoderArguments = <base64-decoder {
dataspace: #:any
}>.
CacheArguments = <cache {
dataspace: #:any
lifetime: float
@ -15,11 +19,20 @@ JsonTranslatorArguments = <json-stdio-translator {
dataspace: #:any
}>.
JsonTranslatorConnected = <connected @path string>.
JsonTranslatorConnected = <connected @address SocketAddress>.
TcpAddress = <tcp @host string @port int>.
UnixAddress = <unix @path string>.
SocketAddress = TcpAddress / UnixAddress .
HttpDriverArguments = <http-driver {
dataspace: #:any
}>.
JsonSocketTranslatorArguments = <json-socket-translator {
dataspace: #:any
socket: string
socket: SocketAddress
}>.
PostgreArguments = <postgre {

View File

@ -1,10 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:
pkgs.buildNimPackage {
name = "syndicate_utils";
propagatedNativeBuildInputs = [ pkgs.pkg-config ];
propagatedBuildInputs =
[ pkgs.postgresql pkgs.sqlite pkgs.libxml2 pkgs.libxslt ];
lockFile = ./lock.json;
src = pkgs.lib.sources.cleanSource ./.;
}

lock.json
View File

@ -6,12 +6,55 @@
"bigints"
],
"path": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source",
"ref": "20231006",
"rev": "86ea14d31eea9275e1408ca34e6bfe9c99989a96",
"sha256": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4",
"srcDir": "src",
"url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"cps"
],
"path": "/nix/store/8gbhwni0akqskdb3qhn5nfgv6gkdz0vz-source",
"rev": "c90530ac57f98a842b7be969115c6ef08bdcc564",
"sha256": "0h8ghs2fqg68j3jdcg7grnxssmllmgg99kym2w0a3vlwca1zvr62",
"srcDir": "",
"url": "https://github.com/ehmry/cps/archive/c90530ac57f98a842b7be969115c6ef08bdcc564.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"cps"
],
"path": "/nix/store/8gbhwni0akqskdb3qhn5nfgv6gkdz0vz-source",
"rev": "c90530ac57f98a842b7be969115c6ef08bdcc564",
"sha256": "0h8ghs2fqg68j3jdcg7grnxssmllmgg99kym2w0a3vlwca1zvr62",
"srcDir": "",
"url": "https://github.com/ehmry/cps/archive/c90530ac57f98a842b7be969115c6ef08bdcc564.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"getdns"
],
"path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
"rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
"sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"getdns"
],
"path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
"rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
"sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
},
{
"method": "fetchzip",
"packages": [
@ -23,77 +66,237 @@
"srcDir": "",
"url": "https://github.com/ehmry/hashlib/archive/f9455d4be988e14e3dc7933eb7cc7d7c4820b7ac.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"illwill"
],
"path": "/nix/store/3lmm3z36qn4gz7bfa209zv0pqrpm3di9-source",
"ref": "v0.3.2",
"rev": "1d12cb36ab7b76c31d2d25fa421013ecb382e625",
"sha256": "0f9yncl5gbdja18mrqf5ixrdgrh95k0khda923dm1jd1x1b7ar8z",
"srcDir": "",
"url": "https://github.com/johnnovak/illwill/archive/1d12cb36ab7b76c31d2d25fa421013ecb382e625.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"nimcrypto"
],
"path": "/nix/store/zyr8zwh7vaiycn1s4r8cxwc71f2k5l0h-source",
"ref": "traditional-api",
"rev": "602c5d20c69c76137201b5d41f788f72afb95aa8",
"sha256": "1dmdmgb6b9m5f8dyxk781nnd61dsk3hdxqks7idk9ncnpj9fng65",
"srcDir": "",
"url": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"npeg"
],
"path": "/nix/store/ffkxmjmigfs7zhhiiqm0iw2c34smyciy-source",
"ref": "1.2.1",
"rev": "26d62fdc40feb84c6533956dc11d5ee9ea9b6c09",
"sha256": "0xpzifjkfp49w76qmaylan8q181bs45anmp46l4bwr3lkrr7bpwh",
"srcDir": "src",
"url": "https://github.com/zevv/npeg/archive/26d62fdc40feb84c6533956dc11d5ee9ea9b6c09.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"preserves"
],
"path": "/nix/store/6nnn5di5vip1vladlb7z56rbw18d1y7j-source",
"ref": "20240208",
"rev": "2825bceecf33a15b9b7942db5331a32cbc39b281",
"sha256": "145vf46fy3wc52j6vs509fm9bi5lx7c53gskbkpcfbkv82l86dgk",
"path": "/nix/store/2hy124xgabz134dxj3wji7mp47fdwy3w-source",
"rev": "9ae435a83c6d5028405538af5d24a023af625b6e",
"sha256": "1k7ywcp1a53x2fpc6wc2b0qzb264dkifash0s1wcp66rw3lx15k2",
"srcDir": "src",
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/2825bceecf33a15b9b7942db5331a32cbc39b281.tar.gz"
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/9ae435a83c6d5028405538af5d24a023af625b6e.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"stew"
],
"path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
"rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
"srcDir": "",
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"syndicate"
],
"path": "/nix/store/y9f3j4m7vmhf8gbpkvqa77jvzrc5ynlm-source",
"ref": "20240208",
"rev": "50a77995bcfe15e6062f54c6af0f55fba850c329",
"sha256": "1avrk86c34qg39w8vlixsksli2gwgbsf29jhlap27ffzdbj2zbal",
"path": "/nix/store/kl628g7vg2ww8wilf8h2ag7qqnvvwdzb-source",
"rev": "c2e1e2e0fa403529750196ce3ccb5a99a4d6c006",
"sha256": "1r8ab79pgrrnzmp49h8rp50c9x8zd0p7bsvzxaphc221nvyfx09j",
"srcDir": "src",
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/50a77995bcfe15e6062f54c6af0f55fba850c329.tar.gz"
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/c2e1e2e0fa403529750196ce3ccb5a99a4d6c006.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"ws"
"sys"
],
"path": "/nix/store/zd51j4dphs6h1hyhdbzdv840c8813ai8-source",
"ref": "0.5.0",
"rev": "9536bf99ddf5948db221ccb7bb3663aa238a8e21",
"sha256": "0j8z9jlvzb1h60v7rryvh2wx6vg99lra6i62whf3fknc53l641fz",
"path": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source",
"rev": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
"sha256": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q",
"srcDir": "src",
"url": "https://github.com/treeform/ws/archive/9536bf99ddf5948db221ccb7bb3663aa238a8e21.tar.gz"
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"sys"
],
"path": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source",
"rev": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
"sha256": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q",
"srcDir": "src",
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"sys"
],
"path": "/nix/store/vf9ls2wip6d8xhsi3rjh0dqsqg597i6b-source",
"rev": "c117ee60542f084525f254e6ade590675a6a2ed6",
"sha256": "12qzx2lnh84xqfgypy0pka8nflq0y8n1izfwx8mb4zya5nzawmyf",
"srcDir": "src",
"url": "https://github.com/alaviss/nim-sys/archive/c117ee60542f084525f254e6ade590675a6a2ed6.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"taps"
],
"path": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source",
"rev": "8c8572cd971d1283e6621006b310993c632da247",
"sha256": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"taps"
],
"path": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source",
"rev": "8c8572cd971d1283e6621006b310993c632da247",
"sha256": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
},
{
"date": "2024-04-02T15:38:57+01:00",
"deepClone": false,
"fetchLFS": false,
"fetchSubmodules": true,
"hash": "sha256-iZb9aAgYr4FGkqfIg49QWiCqeizIi047kFhugHiP8o0=",
"leaveDotGit": false,
"method": "git",
"packages": [
"solo5_dispatcher"
],
"path": "/nix/store/sf5dgj2ljvahcm6my7d61ibda51vnrii-solo5_dispatcher",
"rev": "a7a894a96a2221284012800e6fd32923d83d20bd",
"sha256": "13gjixw80vjqj0xlx2y85ixal82sa27q7j57j9383bqq11lgv5l9",
"srcDir": "pkg",
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher"
},
{
"method": "fetchzip",
"packages": [
"getdns"
],
"path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
"rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
"sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"getdns"
],
"path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
"rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
"sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
"srcDir": "src",
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"nimcrypto"
],
"path": "/nix/store/fkrcpp8lzj2yi21na79xm63xk0ggnqsp-source",
"rev": "f147d30c69bc1c9bcf0e37f7699bcf0fbaab97b5",
"sha256": "1h3dzdbc9kacwpi10mj73yjglvn7kbizj1x8qc9099ax091cj5xn",
"srcDir": "",
"url": "https://github.com/cheatfate/nimcrypto/archive/f147d30c69bc1c9bcf0e37f7699bcf0fbaab97b5.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"npeg"
],
"path": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source",
"rev": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d",
"sha256": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg",
"srcDir": "src",
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"stew"
],
"path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
"rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
"srcDir": "",
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"sys"
],
"path": "/nix/store/vf9ls2wip6d8xhsi3rjh0dqsqg597i6b-source",
"rev": "c117ee60542f084525f254e6ade590675a6a2ed6",
"sha256": "12qzx2lnh84xqfgypy0pka8nflq0y8n1izfwx8mb4zya5nzawmyf",
"srcDir": "src",
"url": "https://github.com/alaviss/nim-sys/archive/c117ee60542f084525f254e6ade590675a6a2ed6.tar.gz"
},
{
"date": "2024-04-02T15:38:57+01:00",
"deepClone": false,
"fetchLFS": false,
"fetchSubmodules": true,
"hash": "sha256-iZb9aAgYr4FGkqfIg49QWiCqeizIi047kFhugHiP8o0=",
"leaveDotGit": false,
"method": "git",
"packages": [
"solo5_dispatcher"
],
"path": "/nix/store/sf5dgj2ljvahcm6my7d61ibda51vnrii-solo5_dispatcher",
"rev": "a7a894a96a2221284012800e6fd32923d83d20bd",
"sha256": "13gjixw80vjqj0xlx2y85ixal82sa27q7j57j9383bqq11lgv5l9",
"srcDir": "pkg",
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher"
},
{
"method": "fetchzip",
"packages": [
"cps"
],
"path": "/nix/store/phdf6siqbhj7vx4qq507lzla81si60iz-source",
"rev": "58772ff9ddb38a4b2ec52da142d8532ba2fe7039",
"sha256": "1lph7v27nqwgm3a0ssi8q348gjrkjwgqc50agw38j7xif6wj80cw",
"srcDir": "",
"url": "https://github.com/ehmry/cps/archive/58772ff9ddb38a4b2ec52da142d8532ba2fe7039.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"cps"
],
"path": "/nix/store/phdf6siqbhj7vx4qq507lzla81si60iz-source",
"rev": "58772ff9ddb38a4b2ec52da142d8532ba2fe7039",
"sha256": "1lph7v27nqwgm3a0ssi8q348gjrkjwgqc50agw38j7xif6wj80cw",
"srcDir": "",
"url": "https://github.com/ehmry/cps/archive/58772ff9ddb38a4b2ec52da142d8532ba2fe7039.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"stew"
],
"path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
"rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
"srcDir": "",
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
},
{
"method": "fetchzip",
"packages": [
"stew"
],
"path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
"rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
"sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
"srcDir": "",
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
}
]
}

shell.nix Normal file
View File

@ -0,0 +1,11 @@
{ pkgs ? import <nixpkgs> { } }:
pkgs.buildNimPackage {
name = "syndicate_utils";
propagatedNativeBuildInputs =
builtins.attrValues { inherit (pkgs) pkg-config solo5; };
propagatedBuildInputs = builtins.attrValues {
inherit (pkgs) getdns postgresql solo5 sqlite libxml2 libxslt;
};
lockFile = ./lock.json;
}

View File

@ -1,4 +1,4 @@
include_rules
: foreach *.nim | $(SYNDICATE_PROTOCOL) ./<schema> ./syndesizer/<checks> |> !nim_bin |> {bin}
: foreach *.nim | $(SYNDICATE_PROTOCOL) ./<schema> |> !nim_bin |> {bin}
: foreach {bin} |> !assert_built |>
: $(BIN_DIR)/msg |> cp %f %o |> $(BIN_DIR)/beep

View File

@ -1,114 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## An actor for filesystem monitoring.
import std/[asyncdispatch, asyncfile, tables]
import posix, posix/inotify
import preserves
import syndicate, syndicate/[bags, relays]
import ./schema/inotify_actor
var IN_NONBLOCK {.importc, nodecl.}: cint
type
BootArgs {.preservesDictionary.} = object
dataspace: Cap
proc toMask(sym: Symbol): uint32 =
case sym.string
of "IN_ACCESS": IN_ACCESS
of "IN_MODIFY": IN_MODIFY
of "IN_ATTRIB": IN_ATTRIB
of "IN_CLOSE_WRITE": IN_CLOSE_WRITE
of "IN_CLOSE_NOWRITE": IN_CLOSE_NOWRITE
of "IN_CLOSE": IN_CLOSE
of "IN_OPEN": IN_OPEN
of "IN_MOVED_FROM": IN_MOVED_FROM
of "IN_MOVED_TO": IN_MOVED_TO
of "IN_MOVE": IN_MOVE
of "IN_CREATE": IN_CREATE
of "IN_DELETE": IN_DELETE
of "IN_DELETE_SELF": IN_DELETE_SELF
of "IN_MOVE_SELF": IN_MOVE_SELF
else: 0
func contains(event, bit: uint32): bool = (event and bit) != 0
iterator symbols(event: uint32): Symbol =
if event.contains IN_ACCESS:
yield Symbol"IN_ACCESS"
if event.contains IN_MODIFY:
yield Symbol"IN_MODIFY"
if event.contains IN_ATTRIB:
yield Symbol"IN_ATTRIB"
if event.contains IN_CLOSE_WRITE:
yield Symbol"IN_CLOSE_WRITE"
if event.contains IN_CLOSE_NOWRITE:
yield Symbol"IN_CLOSE_NOWRITE"
if event.contains IN_OPEN:
yield Symbol"IN_OPEN"
if event.contains IN_MOVED_FROM:
yield Symbol"IN_MOVED_FROM"
if event.contains IN_MOVED_TO:
yield Symbol"IN_MOVED_TO"
if event.contains IN_CREATE:
yield Symbol"IN_CREATE"
if event.contains IN_DELETE:
yield Symbol"IN_DELETE"
if event.contains IN_DELETE_SELF:
yield Symbol"IN_DELETE_SELF"
if event.contains IN_MOVE_SELF:
yield Symbol"IN_MOVE_SELF"
if event.contains (IN_CLOSE_WRITE or IN_CLOSE_NOWRITE):
yield Symbol"IN_CLOSE"
if event.contains (IN_MOVED_FROM or IN_MOVED_TO):
yield Symbol"IN_MOVE"
runActor("inotify_actor") do (root: Cap; turn: var Turn):
let buf = newSeq[byte](8192)
let eventPattern = ?Observe(pattern: !InotifyMessage) ?? { 0: grabLit(), 1: grabLit() }
connectStdio(turn, root)
during(turn, root, ?:BootArgs) do (ds: Cap):
let inf = inotify_init1(IN_NONBLOCK)
doAssert inf != -1, $inf & " - " & $strerror(errno)
var
registry = initTable[cint, string]()
watchBag: Bag[cint]
let
anf = newAsyncFile(AsyncFD inf)
facet = turn.facet
var fut: Future[int]
proc readEvents() {.gcsafe.} =
fut = readBuffer(anf, buf[0].addr, buf.len)
addCallback(fut, facet) do (turn: var Turn):
let n = read(fut)
doAssert n > 0
for event in inotify_events(buf[0].addr, n):
var msg = InotifyMessage(path: registry[event.wd], cookie: event.cookie.BiggestInt)
if event.len > 0:
let n = event.len
msg.name.setLen(n)
copyMem(msg.name[0].addr, event.name.addr, n)
for i, c in msg.name:
if c == '\0':
msg.name.setLen(i)
break
for sym in event.mask.symbols:
msg.event = sym
message(turn, ds, msg)
readEvents()
readEvents()
during(turn, ds, eventPattern) do (path: string, kind: Symbol):
let wd = inotify_add_watch(inf, path, kind.toMask or IN_MASK_ADD)
doAssert wd > 0, $strerror(errno)
registry[wd] = path
discard watchBag.change(wd, 1)
do:
if watchBag.change(wd, -1, clamp = true) == cdPresentToAbsent:
discard close(wd)
registry.del(wd)
do:
close(anf)

View File

@ -8,7 +8,7 @@ when not defined(linux):
import std/oserrors
import preserves
import syndicate, syndicate/relays
import syndicate
import ./schema/mountpoints
type BootArgs {.preservesDictionary.} = object
@ -20,25 +20,31 @@ proc mount(source, target, fsType: cstring; flags: culong; data: pointer): cint
proc umount(target: cstring): cint {.importc, header: "<sys/mount.h>".}
## `umount(2)`
runActor("mount_actor") do (turn: var Turn; root: Cap):
let
targetPat = ?Observe(pattern: !Mountpoint) ?? { 1: grabLit() }
sourcePat = ?Observe(pattern: !Mountpoint) ?? { 0: grabLit(), 2: grabLit() }
connectStdio(turn, root)
during(turn, root, ?:BootArgs) do (ds: Cap):
during(turn, ds, targetPat) do (target: string):
during(turn, ds, sourcePat) do (source: string, fsType: string):
var mountpoint = Mountpoint(
source: source,
target: target,
`type`: fsType,
)
var rc = mount(source, target, fsType, 0, nil)
if rc == 0:
mountpoint.status = Status(orKind: StatusKind.success)
else:
mountpoint.status = Status(orKind: StatusKind.Failure)
mountpoint.status.failure.msg = osErrorMsg(osLastError())
discard publish(turn, ds, mountpoint)
do:
discard umount(target)
proc spawnMountActor*(turn: var Turn; ds: Cap): Actor {.discardable.} =
spawnActor(turn, "mount_actor") do (turn: var Turn):
let
targetPat = ?Observe(pattern: !Mountpoint) ?? { 1: grabLit() }
sourcePat = ?Observe(pattern: !Mountpoint) ?? { 0: grabLit(), 2: grabLit() }
during(turn, ds, ?:BootArgs) do (ds: Cap):
during(turn, ds, targetPat) do (target: string):
during(turn, ds, sourcePat) do (source: string, fsType: string):
var mountpoint = Mountpoint(
source: source,
target: target,
`type`: fsType,
)
var rc = mount(source, target, fsType, 0, nil)
if rc == 0:
mountpoint.status = Status(orKind: StatusKind.success)
else:
mountpoint.status = Status(orKind: StatusKind.Failure)
mountpoint.status.failure.msg = osErrorMsg(osLastError())
discard publish(turn, ds, mountpoint)
do:
discard umount(target)
when isMainModule:
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
discard spawnMountActor(turn, ds)

View File

@ -1,167 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## A ping utility for Syndicate.
import std/[asyncdispatch, asyncnet, monotimes, nativesockets, net, os, strutils, tables, times]
import preserves
import syndicate, syndicate/relays
import ./schema/net_mapper
#[
var
SOL_IP {.importc, nodecl, header: "<sys/socket.h>".}: int
IP_TTL {.importc, nodecl, header: "<netinet/in.h>".}: int
]#
proc toPreservesHook(address: IpAddress): Value = toPreserves($address)
proc fromPreservesHook(address: var IpAddress; pr: Value): bool =
try:
if pr.isString:
address = parseIpAddress(pr.string)
result = true
except ValueError: discard
when isMainModule:
# verify that the hook catches
var ip: IpAddress
assert fromPreservesHook(ip, toPreservesHook(ip))
type
IcmpHeader {.packed.} = object
`type`: uint8
code: uint8
checksum: uint16
IcmpEchoFields {.packed.} = object
header: IcmpHeader
identifier: array[2, byte]
sequenceNumber: uint16
IcmpEcho {.union.} = object
fields: IcmpEchoFields
buffer: array[8, uint8]
IcmpTypes = enum
icmpEchoReply = 0,
icmpEcho = 8,
proc initIcmpEcho(): IcmpEcho =
result.fields.header.`type` = uint8 icmpEcho
# doAssert urandom(result.fields.identifier) # Linux does this?
proc updateChecksum(msg: var IcmpEcho) =
var sum: uint32
msg.fields.header.checksum = 0
for n in cast[array[4, uint16]](msg.buffer): sum = sum + uint32(n)
while (sum and 0xffff0000'u32) != 0:
sum = (sum and 0xffff) + (sum shr 16)
msg.fields.header.checksum = not uint16(sum)
proc match(a, b: IcmpEchoFields): bool =
({a.header.type, b.header.type} == {uint8 icmpEcho, uint8 icmpEchoReply}) and
(a.header.code == b.header.code) and
(a.sequenceNumber == b.sequenceNumber)
type
Pinger = ref object
facet: Facet
ds: Cap
rtt: RoundTripTime
rttHandle: Handle
sum: Duration
count: int64
msg: IcmpEcho
socket: AsyncSocket
sad: Sockaddr_storage
sadLen: SockLen
interval: Duration
proc newPinger(address: IpAddress; facet: Facet; ds: Cap): Pinger =
result = Pinger(
facet: facet,
ds: ds,
rtt: RoundTripTime(address: $address),
msg: initIcmpEcho(),
socket: newAsyncSocket(AF_INET, SOCK_DGRAM, IPPROTO_ICMP, false, true),
interval: initDuration(milliseconds = 500))
toSockAddr(address, Port 0, result.sad, result.sadLen)
# setSockOptInt(getFd socket, SOL_IP, IP_TTL, _)
proc close(ping: Pinger) = close(ping.socket)
proc sqr(dur: Duration): Duration =
let us = dur.inMicroseconds
initDuration(microseconds = us * us)
proc update(ping: Pinger; dur: Duration) {.inline.} =
let secs = dur.inMicroseconds.float / 1_000_000.0
if ping.count == 0: (ping.rtt.minimum, ping.rtt.maximum) = (secs, secs)
elif secs < ping.rtt.minimum: ping.rtt.minimum = secs
elif secs > ping.rtt.maximum: ping.rtt.maximum = secs
ping.sum = ping.sum + dur
inc ping.count
ping.rtt.average = inMicroseconds(ping.sum div ping.count).float / 1_000_000.0
proc exchangeEcho(ping: Pinger) {.async.} =
inc ping.msg.fields.sequenceNumber
# updateChecksum(ping.msg) # Linux does this?
let
a = getMonoTime()
r = sendto(ping.socket.getFd,
unsafeAddr ping.msg.buffer[0], ping.msg.buffer.len, 0,
cast[ptr SockAddr](unsafeAddr ping.sad), # neckbeard loser API
ping.sadLen)
if r == -1'i32:
let osError = osLastError()
raiseOSError(osError)
while true:
var
(data, address, _) = await recvFrom(ping.socket, 128)
b = getMonoTime()
if address != $ping.rtt.address:
stderr.writeLine "want ICMP from ", ping.rtt.address, " but received from ", address, " instead"
elif data.len >= ping.msg.buffer.len:
let
period = b - a
resp = cast[ptr IcmpEcho](unsafeAddr data[0])
if match(ping.msg.fields, resp.fields):
update(ping, period)
return
else:
stderr.writeLine "ICMP mismatch"
else:
stderr.writeLine "reply data has a bad length ", data.len
proc kick(ping: Pinger) {.gcsafe.} =
if not ping.socket.isClosed:
addTimer(ping.interval.inMilliseconds.int, oneshot = true) do (fd: AsyncFD) -> bool:
let fut = exchangeEcho(ping)
fut.addCallback do ():
if fut.failed and ping.rttHandle != Handle(0):
ping.facet.run do (turn: var Turn):
retract(turn, ping.rttHandle)
reset ping.rttHandle
else:
ping.facet.run do (turn: var Turn):
replace(turn, ping.ds, ping.rttHandle, ping.rtt)
if ping.interval < initDuration(seconds = 20):
ping.interval = ping.interval * 2
kick(ping)
type Args {.preservesDictionary.} = object
dataspace: Cap
runActor("net_mapper") do (root: Cap; turn: var Turn):
connectStdio(turn, root)
let rttObserver = ?Observe(pattern: !RoundTripTime) ?? {0: grabLit()}
during(turn, root, ?:Args) do (ds: Cap):
during(turn, ds, rttObserver) do (address: IpAddress):
var ping: Pinger
if address.family == IpAddressFamily.IPv4:
ping = newPinger(address, turn.facet, ds)
kick(ping)
do:
if not ping.isNil: close(ping)

View File

@ -1,9 +1,8 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import preserves, syndicate, syndicate/relays
import ../schema/[config, sql]
import preserves, syndicate
import ./schema/[config, sql]
{.passL: "-lpq".}
@ -126,6 +125,8 @@ proc spawnPostgreActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
PQfinish(conn)
when isMainModule:
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnPostgreActor(turn, root)
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnPostgreActor(turn, ds)

View File

@ -3,40 +3,29 @@
## See the rofi-script(5) manpage for documentation.
import std/[asyncdispatch, cmdline, envvars, strutils, tables]
import std/[cmdline, envvars, strutils, tables]
import preserves, syndicate, syndicate/relays
import ./schema/rofi
proc main =
let
route = envRoute()
rofiPid = getEnv("ROFI_OUTSIDE")
if rofiPid == "":
quit("run this program in rofi")
if getEnv("ROFI_OUTSIDE") == "":
quit("run this program in rofi")
runActor("rofi_script_actor") do (turn: var Turn; root: Cap):
let rootFacet = turn.facet
resolve(turn, root, route) do (turn: var Turn; ds: Cap):
case paramCount()
of 0:
let pat = ?:Options
onPublish(turn, ds, pat) do (options: seq[string]):
stdout.writeLine options.join("\n")
quit()
runActor("rofi_script_actor") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
case paramCount()
of 0:
let pat = ?:Options
onPublish(turn, ds, pat) do (options: seq[string]):
stdout.writeLine options.join("\n")
quit()
of 1:
var select = Select(option: commandLineParams()[0])
for (key, val) in envPairs():
if key.startsWith "ROFI_":
select.environment[Symbol key] = val
message(turn, ds, select)
# TODO: sync not implemented correctly
# sync(turn, ds, stopActor)
callSoon do ():
waitFor sleepAsync(1)
quit()
of 1:
var select = Select(option: commandLineParams()[0])
for (key, val) in envPairs():
if key.startsWith "ROFI_":
select.environment[Symbol key] = val
message(turn, ds, select)
sync(turn, ds, stopActor)
else:
quit("rofi passed an unexpected number of arguments")
main()
else:
quit("rofi passed an unexpected number of arguments")

src/schema/base64.nim Normal file
View File

@ -0,0 +1,19 @@
import
preserves
type
Base64File* {.preservesRecord: "base64-file".} = object
`txt`*: string
`path`*: string
`size`*: BiggestInt
Base64Text* {.preservesRecord: "base64".} = object
`txt`*: string
`bin`*: seq[byte]
proc `$`*(x: Base64File | Base64Text): string =
`$`(toPreserves(x))
proc encode*(x: Base64File | Base64Text): seq[byte] =
encode(toPreserves(x))

View File

@ -17,12 +17,29 @@ type
JsonTranslatorArguments* {.preservesRecord: "json-stdio-translator".} = object
`field0`*: JsonTranslatorArgumentsField0
SocketAddressKind* {.pure.} = enum
`TcpAddress`, `UnixAddress`
`SocketAddress`* {.preservesOr.} = object
case orKind*: SocketAddressKind
of SocketAddressKind.`TcpAddress`:
`tcpaddress`*: TcpAddress
of SocketAddressKind.`UnixAddress`:
`unixaddress`*: UnixAddress
Base64DecoderArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
Base64DecoderArguments* {.preservesRecord: "base64-decoder".} = object
`field0`*: Base64DecoderArgumentsField0
JsonTranslatorConnected* {.preservesRecord: "connected".} = object
`path`*: string
`address`*: SocketAddress
JsonSocketTranslatorArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
`socket`*: string
`socket`*: SocketAddress
JsonSocketTranslatorArguments* {.preservesRecord: "json-socket-translator".} = object
`field0`*: JsonSocketTranslatorArgumentsField0
@ -33,6 +50,12 @@ type
XsltArguments* {.preservesRecord: "xslt".} = object
`field0`*: XsltArgumentsField0
HttpDriverArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
HttpDriverArguments* {.preservesRecord: "http-driver".} = object
`field0`*: HttpDriverArgumentsField0
WebhooksArgumentsField0* {.preservesDictionary.} = object
`endpoints`*: Table[seq[string], EmbeddedRef]
`listen`*: Tcp
@ -53,6 +76,10 @@ type
SqliteArguments* {.preservesRecord: "sqlite".} = object
`field0`*: SqliteArgumentsField0
TcpAddress* {.preservesRecord: "tcp".} = object
`host`*: string
`port`*: BiggestInt
CacheArgumentsField0* {.preservesDictionary.} = object
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
`lifetime`*: float
@ -83,36 +110,47 @@ type
PulseArguments* {.preservesRecord: "pulse".} = object
`field0`*: PulseArgumentsField0
UnixAddress* {.preservesRecord: "unix".} = object
`path`*: string
Tcp* {.preservesRecord: "tcp".} = object
`host`*: string
`port`*: BiggestInt
proc `$`*(x: WebsocketArguments | JsonTranslatorArguments |
proc `$`*(x: WebsocketArguments | JsonTranslatorArguments | SocketAddress |
Base64DecoderArguments |
JsonTranslatorConnected |
JsonSocketTranslatorArguments |
XsltArguments |
HttpDriverArguments |
WebhooksArguments |
FileSystemUsageArguments |
SqliteArguments |
TcpAddress |
CacheArguments |
XmlTranslatorArguments |
PostgreConnectionParameter |
PostgreArguments |
PulseArguments |
UnixAddress |
Tcp): string =
`$`(toPreserves(x))
proc encode*(x: WebsocketArguments | JsonTranslatorArguments |
proc encode*(x: WebsocketArguments | JsonTranslatorArguments | SocketAddress |
Base64DecoderArguments |
JsonTranslatorConnected |
JsonSocketTranslatorArguments |
XsltArguments |
HttpDriverArguments |
WebhooksArguments |
FileSystemUsageArguments |
SqliteArguments |
TcpAddress |
CacheArguments |
XmlTranslatorArguments |
PostgreConnectionParameter |
PostgreArguments |
PulseArguments |
UnixAddress |
Tcp): seq[byte] =
encode(toPreserves(x))

View File

@ -1,8 +1,8 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import preserves, syndicate, syndicate/relays
import ../schema/[config, sql]
import preserves, syndicate
import ./schema/[config, sql]
# Avoid Sqlite3 from the standard library because it is
# only held together by wishful thinking and dlload.
@ -108,6 +108,7 @@ proc spawnSqliteActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
stderr.writeLine("closed SQLite database ", path)
when isMainModule:
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnSqliteActor(turn, root)
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnSqliteActor(turn, ds)

View File

@ -3,42 +3,26 @@
## Syndicate multitool.
import syndicate, syndicate/relays, syndicate/actors/timers
const
withPostgre* {.booldefine.}: bool = true
withSqlite* {.booldefine.}: bool = true
import syndicate, syndicate/relays, syndicate/drivers/timers
import ./syndesizer/[
base64_decoder,
cache_actor,
file_system_usage,
http_driver,
json_socket_translator,
json_translator,
pulses,
webhooks,
websockets,
xml_translator,
xslt_actor]
xml_translator]
when withPostgre:
import ./syndesizer/postgre_actor
when withSqlite:
import ./syndesizer/sqlite_actor
runActor("syndesizer") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnTimers(turn, root)
discard spawnCacheActor(turn, root)
discard spawnFileSystemUsageActor(turn, root)
discard spawnJsonSocketTranslator(turn, root)
discard spawnJsonStdioTranslator(turn, root)
discard spawnPulseActor(turn, root)
discard spawnWebhookActor(turn, root)
discard spawnWebsocketActor(turn, root)
discard spawnXmlTranslator(turn, root)
discard spawnXsltActor(turn, root)
when withPostgre:
discard spawnPostgreActor(turn, root)
when withSqlite:
discard spawnSqliteActor(turn, root)
runActor("syndesizer") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
discard spawnTimerDriver(turn, ds)
discard spawnBase64Decoder(turn, ds)
discard spawnCacheActor(turn, ds)
discard spawnFileSystemUsageActor(turn, ds)
discard spawnHttpDriver(turn, ds)
discard spawnJsonSocketTranslator(turn, ds)
discard spawnJsonStdioTranslator(turn, ds)
discard spawnPulseActor(turn, ds)
discard spawnXmlTranslator(turn, ds)

View File

@ -1,2 +1,3 @@
include_rules
: foreach *.nim | $(SYNDICATE_PROTOCOL) ../<schema> |> !nim_check |> | ./<checks>
: foreach *.nim | $(SYNDICATE_PROTOCOL) ../<schema> |> !nim_bin |> {bin}
: foreach {bin} |> !assert_built |>

View File

@ -0,0 +1,51 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[base64, os]
import pkg/hashlib/misc/blake2
import preserves, syndicate
import ../schema/config
import ../schema/base64 as schema
export Base64DecoderArguments
export schema
proc spawnBase64Decoder*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "base64-decoder") do (turn: var Turn):
let tmpDir = getTempDir()
during(turn, root, ?:Base64DecoderArguments) do (ds: Cap):
let decTextPat = ?Observe(pattern: !Base64Text) ?? { 0: grabLit() }
during(turn, ds, decTextPat) do (txt: string):
discard publish(turn, ds, Base64Text(
txt: txt,
bin: cast[seq[byte]](decode(txt)),
))
let encTextPat = ?Observe(pattern: !Base64Text) ?? { 1: grabLit() }
during(turn, ds, encTextPat) do (bin: seq[byte]):
discard publish(turn, ds, Base64Text(
txt: encode(bin),
bin: bin,
))
let decFilePat = ?Observe(pattern: !Base64File) ?? { 0: grabLit() }
during(turn, ds, decFilePat) do (txt: string):
var bin = decode(txt)
var ctx = init[BLAKE2B_512]()
ctx.update(bin)
let
digest = $ctx.final()
path = tmpDir / digest
writeFile(path, bin)
discard publish(turn, ds, Base64File(
txt: txt,
path: path,
size: bin.len,
))
when isMainModule:
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnBase64Decoder(turn, ds)

View File

@ -3,8 +3,8 @@
import std/times
import preserves, syndicate,
syndicate/[durings, relays],
syndicate/actors/timers
syndicate/durings,
syndicate/drivers/timers
import ../schema/config
@ -35,7 +35,7 @@ proc isObserve(pat: Pattern): bool =
pat.dcompound.rec.label.isSymbol"Observe"
proc spawnCacheActor*(turn: var Turn; root: Cap): Actor =
spawn("cache_actor", turn) do (turn: var Turn):
spawnActor(turn, "cache_actor") do (turn: var Turn):
during(turn, root, ?:CacheArguments) do (ds: Cap, lifetime: float64):
onPublish(turn, ds, ?:Observe) do (pat: Pattern, obs: Cap):
var cache: CacheEntity
@ -51,7 +51,8 @@ proc spawnCacheActor*(turn: var Turn; root: Cap): Actor =
discard observe(turn, ds, pat, cache)
when isMainModule:
runActor("cache_actor") do (turn: var Turn; root: Cap):
spawnTimers(turn, root)
connectStdio(turn, root)
discard spawnCacheActor(turn, root)
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
discard spawnTimerDriver(turn, ds)
discard spawnCacheActor(turn, ds)

View File

@ -3,7 +3,7 @@
import std/[dirs, os, paths]
import preserves
import syndicate, syndicate/relays
import syndicate
import ../schema/[assertions, config]
@ -22,6 +22,7 @@ proc spawnFileSystemUsageActor*(turn: var Turn; root: Cap): Actor {.discardable.
# TODO: updates?
when isMainModule:
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnFileSystemUsageActor(turn, root)
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
discard spawnFileSystemUsageActor(turn, ds)

View File

@ -0,0 +1,42 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## Thin wrapper over `syndicate/drivers/http_driver`.
import pkg/preserves, pkg/syndicate
import pkg/syndicate/drivers/http_driver
import pkg/taps
import ../schema/config
proc spawnHttpDriver*(turn: var Turn; ds: Cap): Actor {.discardable.}=
http_driver.spawnHttpDriver(turn, ds)
during(turn, ds, ?:HttpDriverArguments) do (ds: Cap):
http_driver.spawnHttpDriver(turn, ds)
when isMainModule:
import syndicate/relays
when defined(solo5):
import solo5
acquireDevices([("eth0", netBasic)], netAcquireHook)
proc envRoute: Route =
var pr = parsePreserves $solo5_start_info.cmdline
if result.fromPreserves pr:
return
elif pr.isSequence:
for e in pr:
if result.fromPreserves e:
return
quit("failed to parse command line for route to Syndicate gatekeeper")
runActor("main") do (turn: var Turn):
let ds = newDataspace(turn)
spawnRelays(turn, ds)
resolve(turn, ds, envRoute(), spawnHttpDriver)
else:
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnHttpDriver(turn, ds)
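For completeness, a configuration sketch for requesting this driver through the syndesizer, modelled on the README examples above. The `<http-driver {…}>` record shape comes from `HttpDriverArguments` in the config schema; the service wiring and `$ds` are illustrative assumptions.
```
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
  $cap <http-driver { dataspace: $ds }>
]
```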

View File

@ -0,0 +1,2 @@
define:ipv6Enabled
include:"std/assertions"

View File

@ -1,39 +1,77 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[asyncdispatch, asyncnet, json]
from std/nativesockets import AF_UNIX, SOCK_STREAM, Protocol
import preserves, preserves/jsonhooks, syndicate, syndicate/relays
import std/[json, options]
import pkg/sys/[ioqueue, sockets]
import preserves, preserves/jsonhooks, syndicate
import ../schema/config, ../json_messages
import ../schema/[config, json_messages]
proc spawnJsonSocketTranslator*(turn: var Turn; root: Cap): Actor =
spawn("json-socket-translator", turn) do (turn: var Turn):
during(turn, root, ?:JsonSocketTranslatorArguments) do (ds: Cap, socketPath: string):
let socket = newAsyncSocket(
domain = AF_UNIX,
sockType = SOCK_STREAM,
protocol = cast[Protocol](0),
buffered = false,
)
addCallback(connectUnix(socket, socketPath), turn) do (turn: var Turn):
let a = JsonTranslatorConnected(path: socketPath)
discard publish(turn, ds, a)
template translateSocketBody {.dirty.} =
# Template workaround for CPS and parameterized types.
var
guard = initGuard(facet)
dec = newBufferedDecoder(0)
buf = new string #TODO: get a pointer into the decoder
alive = true
proc kill(turn: var Turn) =
alive = false
proc setup(turn: var Turn) =
# Closure, not CPS.
onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
if alive:
discard trampoline:
whelp write(socket[], $data & "\n")
else:
stderr.writeLine "dropped send of ", data
discard publish(turn, ds, initRecord("connected", sa.toPreserves))
onStop(facet, kill)
run(facet, setup)
while alive:
# TODO: parse buffer
buf[].setLen(0x4000)
let n = read(socket[], buf)
if n < 1:
stderr.writeLine "socket read returned ", n
else:
buf[].setLen(n)
dec.feed(buf[])
var data = dec.parse()
if data.isSome:
proc send(turn: var Turn) =
# Closure, not CPS.
message(turn, ds, initRecord("recv", data.get))
run(facet, send)
stderr.writeLine "close socket ", sa
close(socket[])
let socketFacet = turn.facet
proc processOutput(fut: Future[string]) {.gcsafe.} =
run(socketFacet) do (turn: var Turn):
var data = fut.read.parseJson
message(turn, ds, RecvJson(data: data))
socket.recvLine.addCallback(processOutput)
socket.recvLine.addCallback(processOutput)
proc translateSocket(facet: Facet; ds: Cap; sa: TcpAddress) {.asyncio.} =
var
socket = new AsyncConn[Protocol.Tcp]
conn = connectTcpAsync(sa.host, Port sa.port)
socket[] = conn
translateSocketBody()
onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
asyncCheck(turn, send(socket, $data & "\n"))
do:
close(socket)
proc translateSocket(facet: Facet; ds: Cap; sa: UnixAddress) {.asyncio.} =
var
socket = new AsyncConn[Protocol.Unix]
conn = connectUnixAsync(sa.path)
socket[] = conn
translateSocketBody()
proc spawnJsonSocketTranslator*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawnActor(turn, "json-socket-translator") do (turn: var Turn):
during(turn, root, ?:JsonSocketTranslatorArguments) do (ds: Cap, sa: TcpAddress):
linkActor(turn, "json-socket-translator") do (turn: var Turn):
discard trampoline:
whelp translateSocket(turn.facet, ds, sa)
during(turn, root, ?:JsonSocketTranslatorArguments) do (ds: Cap, sa: UnixAddress):
linkActor(turn, "json-socket-translator") do (turn: var Turn):
discard trampoline:
whelp translateSocket(turn.facet, ds, sa)
when isMainModule:
runActor("json_socket_translator") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnJsonSocketTranslator(turn, root)
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnJsonSocketTranslator(turn, ds)

View File

@ -3,10 +3,9 @@
import std/[json, osproc]
import preserves
import syndicate, syndicate/relays
import syndicate
import ../schema/config
import ../json_messages
import ../schema/[config, json_messages]
proc runChild(params: seq[string]): string =
if params.len < 1:
@ -21,13 +20,14 @@ proc runChild(params: seq[string]): string =
stderr.writeLine "no ouput"
proc spawnJsonStdioTranslator*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawn("json-stdio-translator", turn) do (turn: var Turn):
spawnActor(turn, "json-stdio-translator") do (turn: var Turn):
during(turn, root, ?:JsonTranslatorArguments) do (argv: seq[string], ds: Cap):
var js = parseJson(runChild(argv))
message(turn, ds, RecvJson(data: js))
discard publish(turn, ds, RecvJson(data: js))
when isMainModule:
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnJsonStdioTranslator(turn, root)
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnJsonStdioTranslator(turn, ds)

View File

@ -2,9 +2,7 @@
# SPDX-License-Identifier: Unlicense
import std/[options, tables, times]
import preserves, syndicate,
syndicate/relays,
syndicate/actors/timers
import preserves, syndicate, syndicate/drivers/timers
import ../schema/[assertions, config]
@ -88,7 +86,7 @@ proc newProxyEntity(turn: var Turn; timers, ds: Cap; period: float): ProxyEntity
proc spawnPulseActor*(turn: var Turn; root: Cap): Actor =
## Spawn an actor that retracts and re-asserts observers on
## a timed pulse. Requires a timer service on the `root` capability.
spawn("pulse", turn) do (turn: var Turn):
spawnActor(turn, "pulse") do (turn: var Turn):
let grabPeriod = ?Observe(pattern: !Pulse) ?? { 0: grab() }
during(turn, root, ?:PulseArguments) do (ds: Cap):
during(turn, ds, grabPeriod) do (lit: Literal[float]):
@ -100,7 +98,7 @@ proc spawnPulseActor*(turn: var Turn; root: Cap): Actor =
discard publish(turn, ds, pulse)
when isMainModule:
runActor("main") do (turn: var Turn; root: Cap):
spawnTimers(turn, root)
connectStdio(turn, root)
discard spawnPulseActor(turn, root)
import syndicate/relays
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
discard spawnPulseActor(turn, ds)

View File

@ -1,105 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## An actor for relaying Webhooks.
import std/[asyncdispatch, asynchttpserver, net, strutils, tables, uri]
import preserves, preserves/jsonhooks
import syndicate, syndicate/[bags, relays]
import syndicate/protocols/http
import ../schema/config
type
CapBag = Bag[Cap]
Endpoints = Table[seq[string], Cap]
func splitPath(s: string): seq[string] = s.strip(chars={'/'}).split('/')
proc toRecord(req: Request; seqnum: BiggestInt; path: seq[string]): Value =
## Convert a request value from the std/asynchttpserver module
## to a request type from syndicate/protocols/http.
var record: HttpRequest
record.sequenceNumber = seqnum
record.host = req.hostname
record.`method` = Symbol($req.reqMethod)
record.path = path
for key, val in req.headers.pairs:
record.headers[Symbol key] = val
for key, val in decodeQuery(req.url.query):
record.query[Symbol key] =
@[QueryValue(orKind: QueryValueKind.string, string: val)]
let contentType = req.headers.getOrDefault("content-type")
result = toPreserves record
if req.body.len > 0:
result[7] =
case contentType.toString
of "application/json":
req.body.parsePreserves
of "application/octet-stream":
cast[seq[byte]](req.body).toPreserves
else:
req.body.toPreserves
proc spawnWebhookActor*(turn: var Turn; root: Cap): Actor =
spawn("webhooks", turn) do (turn: var Turn):
let pat = grabRecord("webhooks", grabDictionary({ "listen": ?:config.Tcp }))
# Grab the details on listening for requests.
# Disregard endpoints so the server doesn't restart as those change.
during(turn, root, pat) do (host: string; port: Port):
let endpointsPat = grabRecord("webhooks", grabDictionary({
"listen": ?config.Tcp(host: host, port: BiggestInt port),
"endpoints": grab(),
}))
# construct a pattern for grabbing endpoints when the server is ready
var seqNum: BiggestInt
let facet = turn.facet
let endpoints = newTable[seq[string], CapBag]()
# use a bag so the same capability registered multiple
# times with the same path does not get duplicate messages
proc cb(req: Request): Future[void] =
inc(seqNum)
let path = req.url.path.splitPath
if not endpoints.hasKey path:
result = respond(req, Http404,
"no capabilities registered at $1\n" % [req.url.path])
else:
result = respond(req, Http200, "")
proc act(turn: var Turn) {.gcsafe.} =
let rec = req.toRecord(seqNum, path)
for cap in endpoints[path]:
message(turn, cap, rec)
run(facet, act)
let server = newAsyncHttpServer()
stderr.writeLine("listening for webhooks at ", host, ":", port)
if host.isIpAddress:
var ip = parseIpAddress host
case ip.family
of IPv6:
asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
of IPv4:
asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))
else:
asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))
during(turn, root, endpointsPat) do (eps: Endpoints):
for path, cap in eps:
if not endpoints.hasKey path:
endpoints[path] = CapBag()
discard endpoints[path].change(cap, +1)
do:
for path, cap in eps:
discard endpoints[path].change(cap, -1)
do:
stderr.writeLine("closing for webhook server at ", host, ":", port)
close(server)
when isMainModule:
runActor("webhooks") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnWebhookActor(turn, root)

View File

@ -1,55 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
import std/[asyncdispatch, json]
import preserves
import syndicate, syndicate/relays
import ws
import ../schema/config, ../json_messages
type WebSocket = ws.WebSocket
# not the object from the transportAddress schema
proc spawnWebsocketActor*(turn: var Turn; root: Cap): Actor =
spawn("websocket-actor", turn) do (turn: var Turn):
during(turn, root, ?:WebsocketArguments) do (ds: Cap, url: string):
let facet = turn.facet
var
ws: WebSocket
connectedHandle: Handle
newWebSocket(url).addCallback(turn) do (turn: var Turn; sock: WebSocket):
ws = sock
connectedHandle = publish(turn, ds, initRecord("connected", url.toPreserves))
var fut: Future[(Opcode, string)]
proc recvMessage() {.gcsafe.} =
fut = receivePacket ws
addCallback(fut, facet) do (turn: var Turn):
let (opcode, data) = read fut
case opcode
of Text:
message(turn, ds,
RecvJson(data: data.parseJson))
of Binary:
message(turn, ds,
initRecord("recv", cast[seq[byte]](data).toPreserves))
of Ping:
asyncCheck(turn, ws.send(data, Pong))
of Pong, Cont:
discard
of Close:
retract(turn, connectedHandle)
stderr.writeLine "closed connection with ", url
stop(turn)
return
recvMessage()
recvMessage()
onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
asyncCheck(turn, ws.send($data, Text))
do:
close(ws)
when isMainModule:
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
discard spawnWebsocketActor(turn, root)

View File

@ -18,7 +18,7 @@ proc translatePreserves(pr: Value): XmlTranslation {.gcsafe.} =
if xn.isSome: result.xml = $get(xn)
proc spawnXmlTranslator*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawn("xml-translator", turn) do (turn: var Turn):
spawnActor(turn, "xml-translator") do (turn: var Turn):
during(turn, root, ?:XmlTranslatorArguments) do (ds: Cap):
let obsPat = ?Observe(pattern: !XmlTranslation)
during(turn, ds, obsPat ?? {0: grab()}) do (xs: Literal[string]):
@ -28,6 +28,6 @@ proc spawnXmlTranslator*(turn: var Turn; root: Cap): Actor {.discardable.} =
when isMainModule:
import syndicate/relays
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnXmlTranslator(turn, root)
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnXmlTranslator(turn, ds)

View File

@ -1,133 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense
## This was all Tony's idea, except for the silly name.
import std/[asyncdispatch, os, terminal]
import preserves
import syndicate, syndicate/[durings, relays]
import illwill
proc exitProc() {.noconv.} =
illwillDeinit()
showCursor()
quit QuitSuccess
setControlCHook(exitProc)
proc parsePattern(pr: Value): Pattern =
let
dropSigil = initRecord("lit", "_".toSymbol)
grabSigil = initRecord("lit", "?".toSymbol)
var pr = grab(pr).toPreserves
apply(pr) do (pr: var Value):
if pr == dropSigil:
pr = initRecord("_")
elif pr == grabSigil:
pr = initRecord("bind", initRecord("_"))
doAssert result.fromPreserves(pr)
proc inputPattern: Pattern =
var args = commandLineParams()
if args.len != 1:
quit "expected a single pattern argument"
else:
var input = pop args
if input == "":
quit "expected Preserves Pattern on stdin"
else:
var pr: Value
try: pr = decodePreserves(input)
except ValueError: discard
try: pr = parsePreserves(input)
except ValueError: discard
if pr.isFalse:
quit "failed to parse Preserves argument"
result = parsePattern(pr)
type TermEntity {.final.} = ref object of Entity
pattern: Pattern
value: Value
method publish(te: TermEntity; turn: var Turn; v: AssertionRef; h: Handle) =
te.value = v.value
var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
var y = 1
termBuf.write(1, y, $te.pattern, styleBright)
inc(y)
termBuf.drawHorizLine(0, termBuf.width(), y)
inc(y)
termBuf.write(0, y, $h, styleBright)
for i, e in te.value.sequence:
inc(y)
termBuf.write(1, y, $e)
termBuf.display()
method retract(te: TermEntity; turn: var Turn; h: Handle) =
var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
var y = 1
termBuf.write(1, y, $te.pattern, styleDim)
inc y
termBuf.drawHorizLine(0, termBuf.width(), y, true)
inc(y)
termBuf.write(0, y, $h, styleBright)
if te.value.isSequence:
for i, e in te.value.sequence:
inc(y)
termBuf.write(1, y, $e)
else:
inc(y)
termBuf.write(1, y, $te.value)
termBuf.display()
type DumpEntity {.final.} = ref object of Entity
discard
method publish(dump: DumpEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
stdout.writeLine($ass.value)
stdout.flushFile()
method message*(dump: DumpEntity; turn: var Turn; ass: AssertionRef) =
stdout.writeLine($ass.value)
stdout.flushFile()
proc exit {.noconv.} =
illwillDeinit()
showCursor()
quit()
setControlCHook(exit)
proc main =
let
route = envRoute()
pat = inputPattern()
if stdout.is_a_TTY:
illwillInit()
hideCursor()
discard bootDataspace("syndex_card") do (turn: var Turn; root: Cap):
resolve(turn, root, route) do (turn: var Turn; ds: Cap):
var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
termBuf.write(1, 1, $pat, styleBright)
termBuf.drawHorizLine(1, termBuf.width(), 2)
termBuf.display()
discard observe(turn, ds, pat, TermEntity(pattern: pat))
while true:
try: poll()
except CatchableError:
illwillDeinit()
showCursor()
quit getCurrentExceptionMsg()
else:
let entity = DumpEntity()
runActor("syndex_card") do (root: Cap; turn: var Turn):
spawnRelays(turn, root)
resolve(turn, root, route) do (turn: var Turn; ds: Cap):
discard observe(turn, ds, pat, entity)
main()

View File

@ -3,7 +3,7 @@
import std/[os, strutils]
import preserves, syndicate
import ../schema/[assertions, config]
import ./schema/[assertions, config]
{.passC: staticExec("pkg-config --cflags libxslt").}
{.passL: staticExec("pkg-config --libs libxslt").}
@ -174,7 +174,7 @@ proc toPreservesHook*(xn: xmlNodePtr): Value =
items[0]
proc spawnXsltActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
spawn("xslt", turn) do (turn: var Turn):
spawnActor(turn, "xslt") do (turn: var Turn):
initLibXml()
during(turn, root, ?:XsltArguments) do (ds: Cap):
let sheetsPat = ?Observe(pattern: !XsltTransform) ?? {0: grab(), 1: grab()}
@ -206,6 +206,6 @@ proc spawnXsltActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
when isMainModule:
import syndicate/relays
runActor("main") do (turn: var Turn; root: Cap):
connectStdio(turn, root)
spawnXsltActor(turn, root)
runActor("main") do (turn: var Turn):
resolveEnvironment(turn) do (turn: var Turn; ds: Cap):
spawnXsltActor(turn, ds)

View File

@ -1,13 +1,13 @@
# Package
version = "20240209"
version = "20240405"
author = "Emery Hemingway"
description = "Utilites for Syndicated Actors and Synit"
license = "unlicense"
srcDir = "src"
bin = @["mintsturdyref", "mount_actor", "msg", "net_mapper", "preserve_process_environment", "syndesizer", "syndex_card"]
bin = @["mintsturdyref", "mount_actor", "msg", "net_mapper", "preserve_process_environment", "syndesizer"]
# Dependencies
requires "nim >= 2.0.0", "illwill", "syndicate >= 20240208", "ws", "https://github.com/ehmry/nim-sys.git#b974e1a4ca6ae7d89fc9e7b3714b1e7daf6f33e5", "https://github.com/nim-works/cps"
requires "http://git.syndicate-lang.org/ehmry/syndicate-nim.git >= 20240405", "https://github.com/ehmry/nim-sys.git#4ef3b624db86e331ba334e705c1aa235d55b05e1", "https://git.sr.ht/~ehmry/nim_taps >= 20240405"