Compare commits
31 Commits
Author | SHA1 | Date |
---|---|---|
Emery Hemingway | 3a0bd1cd02 | |
Emery Hemingway | cda940cf75 | |
Emery Hemingway | bf0b5d6b86 | |
Emery Hemingway | b3a417a072 | |
Emery Hemingway | 48408d2763 | |
Emery Hemingway | 494418540a | |
Emery Hemingway | e954fdefec | |
Emery Hemingway | de7683b467 | |
Emery Hemingway | 5e075f3a0c | |
Emery Hemingway | e2b96e39ef | |
Emery Hemingway | d40c29ecad | |
Emery Hemingway | 1f099d6bd2 | |
Emery Hemingway | 920cd28c89 | |
Emery Hemingway | fc9762eb87 | |
Emery Hemingway | 2b80be0fcf | |
Emery Hemingway | c9b38dd86e | |
Emery Hemingway | 242bda24e5 | |
Emery Hemingway | aa8ff4c364 | |
Emery Hemingway | 4f2e19b0b2 | |
Emery Hemingway | 1827c91da0 | |
Emery Hemingway | 40ad6a2dbc | |
Emery Hemingway | 25d1e40990 | |
Emery Hemingway | 89f23f14f5 | |
Emery Hemingway | f072525dd4 | |
Emery Hemingway | 2d3189288f | |
Emery Hemingway | ff1f1ac44b | |
Emery Hemingway | 028df08d66 | |
Emery Hemingway | 48ce4ac7e0 | |
Emery Hemingway | 119d89ff1c | |
Emery Hemingway | 29b19c711c | |
Emery Hemingway | c89a9a333a | |
@@ -1 +1,2 @@
/nim.cfg
*.check
README.md (297 lines changed)
@@ -29,6 +29,64 @@ Example configuration:
]
```

### File System Usage

Summarize the size of a file-system directory. Equivalent to `du -s -b`.
Query the size of a directory in bytes by observing `<file-system-usage "/SOME/PATH" ?size>`.

```
# Configuration example

? <exposed-dataspace ?ds> [

  <require-service <daemon syndesizer>>
  ? <service-object <daemon syndesizer> ?cap> [
    $cap <file-system-usage { dataspace: $ds }>
  ]

]
```
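With the service attached, a client reads the summary by observing the record described above. A minimal sketch, assuming the `$ds` dataspace and `$log` actor used elsewhere in this README, and a hypothetical `/var/log` path:

```
# Hypothetical usage sketch
$ds ? <file-system-usage "/var/log" ?size> [
  $log ! <log "-" { dir-size: $size }>
]
```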

### HTTP driver

Experimental HTTP server that services requests using [some version](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/commit/9864ce0ec86fb2f916c2aab318a1e6994ab8834c/schemas/http.prs) of the http Syndicate protocol schema.

```
# Configuration example

let ?not-found = dataspace
$not-found ? <request _ ?res> [
  $res ! <status 503 "Service unavailable">
  $res ! <done "No binding here.">
]

let ?greeting = dataspace
$greeting ? <request _ ?res> [
  $res ! <status 200 "ok">
  $res ! <chunk "Hello world">
  $res ! <done "!">
]

let ?http = dataspace
$http [
  <http-bind #f 80 get [ ] $not-found>
  <http-bind #f 80 get [|...|] $not-found>
  <http-bind #f 80 get ["hello"] $greeting>
]

? <service-object <daemon http-driver> ?cap> [
  $cap <http-driver { dataspace: $http }>
]

<daemon http-driver {
  argv: [ "/bin/syndesizer" ]
  clearEnv: #t
  protocol: application/syndicate
}>

<require-service <daemon http-driver>>
```

### JSON Socket Translator

Communicate with sockets that send and receive lines of JSON using `<send …>` and `<recv …>` messages.
@@ -58,7 +116,7 @@ let ?mpvSpace = dataspace
? <service-object <daemon syndesizer> ?cap> [
  $cap <json-socket-translator {
    dataspace: $mpvSpace
    socket: "/run/user/1000/mpv.sock"
    socket: <unix "/run/user/1000/mpv.sock">
  }>
]
]
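With the translator configured as above, each JSON line from the socket arrives as a `<recv …>` message and `<send …>` messages are written back out. A sketch against mpv's JSON IPC (the `get_property` command and `"data"` reply field are mpv's; the dataspace wiring shown here is illustrative, not taken from this diff):

```
# Hypothetical usage sketch
$mpvSpace ! <send { "command": ["get_property", "volume"] }>
$mpvSpace ?? <recv { "data": ?volume }> [
  $log ! <log "-" { mpv-volume: $volume }>
]
```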
@@ -106,6 +164,27 @@ let ?ds = dataspace
]
```

### Pulse proxy

A proxy actor that passes assertions and messages to a configured capability but only asserts observations on a periodic pulse.
This can be used to implement polling behavior.

```
# Example config
let ?ds = dataspace

<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
  $cap <pulse {dataspace: $ds}>
]

$ds ? <pulse 3600.0 ?proxy> [
  $proxy ? <assertion-updated-hourly ?value> [
    $log ! <log "-" {assertion-updated-hourly: $value}>
  ]
]
```

### SQLite

Readonly access to SQLite databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.
@@ -125,61 +204,100 @@ let ?sqlspace = dataspace
}>
]

$sqlspace <query example-row "SELECT id, name FROM stuff">
let ?tuplespace = dataspace

$sqlspace ? <example-row ?id ?name> [
  $log ! <log "-" { row: <example-row $id $name> }>
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>

$tuplespace [
  ? [?id ?name] [
    $log ! <log "-" { row: <example-row $id $name> }>
  ]
  ? <sqlite-error ?msg ?ctx> [
    $log ! <log "-" { msg: $msg ctx: $ctx }>
  ]
]
```

### Webhooks
### XML translator

Listens for webhook requests and sends request data to a dataspace as messages.
Request data is formatted according to the http schema [defined in syndicate-protocols](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/branch/main/schemas/http.prs), with the exception that message bodies may be **bytes**, **string**, or **any** for the `content-type`s of `application/octet-stream`, `text/*`, and `application/json` respectively.
Translates between Preserves and XML according to the [Conventions for Common Data Types](https://preserves.dev/conventions.html).

Examples:
- `<xml-translation "<foo a=\"1\"> <bar>hello world!</bar></foo>" <foo {"a": 1}<bar "hello world!">>>`
- `<xml-translation "" [#t #f]>`
- `<xml-translation "<<</>>" #f>`

```
# Configuration example
<require-service <daemon syndesizer>>
? <service-object <daemon syndesizer> ?cap> [
  $cap <webhooks {
    listen: <tcp "0.0.0.0" 1048>
    endpoints: {

      # http://0.0.0.0:1048/my-endpoint
      ["my-endpoint"]: $target-dataspace

      # http://0.0.0.0:1048/some/multi-element/path
      ["some", "multi-element", "path"]: $target-dataspace

    }
  }>
]
```

### Websockets

Connects to a websocket endpoint. During the lifetime of the connection a `<connected $URL>` assertion is made. Messages received from the server are sent to the dataspace wrapped in `<recv …>` records, and messages observed as `<send …>` are sent to the server.

```
# Configuration example
<require-service <daemon syndesizer>>

let ?websocketspace = dataspace

? <service-object <daemon syndesizer> ?cap> [
  $cap <websocket {
    dataspace: $websocketspace
    url: "ws://127.0.0.1:5225/"
  }>
]

$websocketspace ? <connected $websocketUrl> [
<bind <ref { oid: "websocket" key: #x"" }> $websocketspace #f>
? <sharedspace ?ds> [
  $ds ? <Observe <rec xml-translation _> _> $config [
    $config <require-service <daemon syndesizer>>
    $config ? <service-object <daemon syndesizer> ?cap> [
      $cap <xml-translator { dataspace: $ds }>
    ]
  ]
]
```

---

## http_client

The inverse of `http-driver`.

### Caveats
- HTTPS is assumed unless the request is to port 80.
- If the request or response sets `Content-Type` to `application/json` or `…/preserves`, the body will be a parsed Preserves value.
- No cache support.
- Internal errors propagate using a `400 Internal client error` response.

Sample Syndicate server script:
```
# A top-level dataspace
let ?ds = dataspace

# A dataspace for handling the HTTP response.
let ?response = dataspace
$response [
  ?? <done { "code": "EUR" "exchange_middle": ?middle } > [
    $ds <exchange EUR RSD $middle>
  ]
]

$ds [
  <request
    # Request Euro to Dinar exchange rate.
    <http-request 0 "kurs.resenje.org" 443
      get ["api" "v1" "currencies" "eur" "rates" "today"]
      {Content-Type: "application/json"} {} #f
    >
    $response
  >

  # Log all assertions.
  ? ?any [
    $log ! <log "-" { assertion: $any }>
  ]
]

? <service-object <daemon http-client> ?cap> [
  $cap <http-client {
    dataspace: $ds
  }>
]

<require-service <daemon http-client>>

? <built http-client ?path ?sum> [
  <daemon http-client {
    argv: [ "/bin/http_client" ]
    clearEnv: #t
    protocol: application/syndicate
  }>
]
```

## mintsturdyref

A utility for minting [Sturdyrefs](https://synit.org/book/operation/builtin/gatekeeper.html#sturdyrefs).

@@ -229,30 +347,38 @@ Sample Syndicate server script:

## msg

A utility that sends messages to `$SYNDICATE_ROUTE`.
A utility that parses its command-line arguments as Preserves and sends them as messages to `$SYNDICATE_ROUTE`.
When called as `assert` (by a symlink or a rename) it will make assertions instead.
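For example, `msg '<beep>'` would parse `<beep>` as a Preserves record and send it as a message over the route named by `$SYNDICATE_ROUTE`, while the same invocation through the `assert` symlink would publish it as an assertion (illustrative invocations, not taken from this diff).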

## PostgreSQL

## net_mapper
Readonly access to PostgreSQL databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.

Publishes ICMP packet round-trip-times. See [net_mapper.prs](./net_mapper.prs) for a protocol description. [Source](./src/net_mapper.nim).
Can be disabled by passing `--define:withPostgre=no` to the Nim compiler.

Example script:
```
? <machine-dataspace ?machine> [
  $machine ? <rtt "10.0.33.136" ?min ?avg ?max> [
    $log ! <log "-" { ping: { min: $min avg: $avg max: $max } }>
  ]
# Configuration example
<require-service <daemon postgre_actor>>

$config [
  <require-service <daemon net_mapper>>
  <daemon net_mapper {
    argv: ["/bin/net_mapper"]
    protocol: application/syndicate
  }>
  ? <service-object <daemon net_mapper> ?cap> [
    $cap { dataspace: $machine }
  ]
]
let ?sqlspace = dataspace

? <service-object <daemon postgre_actor> ?cap> [
  $cap <postgre {
    dataspace: $sqlspace
    connection: [
      ["host" "example.com"]
      ["dbname" "foobar"]
      ["user" "hackme"]
    ]
  }>
]

let ?tuplespace = dataspace

$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>

$tuplespace ? [?id ?name] [
  $log ! <log "-" { row: <example-row $id $name> }>
]
```

@@ -261,6 +387,33 @@ Example script:
This utility serializes its process environment to Preserves and prints it to stdout.
It can be used to feed the environment variables of a nested child of the Syndicate server back to the server. For example, to retrieve the environment variables that a desktop manager passed on to its children.

## SQLite

Readonly access to SQLite databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.

Can be disabled by passing `--define:withSqlite=no` to the Nim compiler.

```
# Configuration example
<require-service <daemon sqlite_actor>>

let ?sqlspace = dataspace

? <service-object <daemon sqlite_actor> ?cap> [
  $cap <sqlite {
    dataspace: $sqlspace
    database: "/var/db/example.db"
  }>
]

let ?tuplespace = dataspace

$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>

$tuplespace ? [?id ?name] [
  $log ! <log "-" { row: <example-row $id $name> }>
]
```

## syndump

@@ -271,3 +424,25 @@ Example
# Print patterns in use, filter down with AWK to only the published patterns.
$ FS=':' syndump '<Observe ? _>' | awk -F : '/^+/ { print $2 }'
```

## XSLT processor

Perform XML stylesheet transformations. For a given textual XSLT stylesheet and a textual XML document, generate an abstract XML document in Preserves form. Inputs may be XML text or paths to XML files.

```
# Configuration example
let ?ds = dataspace
$ds [
  ? <xslt-transform "/stylesheet.xls" "/doc.xml" ?output> [
    ? <xml-translation ?text $output> [
      $log ! <log "-" { xslt-output: $text }>
    ]
  ]
]

<require-service <daemon xslt_actor>>
? <service-object <daemon xslt_actor> ?cap> $cap [
  <xml-translator { dataspace: $ds }>
  <xslt { dataspace: $ds }>
]
```
Tupfile (3 lines changed)
@@ -1,2 +1,3 @@
include_rules
: lock.json |> !nim_cfg |> | ./<lock>
: |> !nim_lk |> {lockfile}
: {lockfile} |> !nim_cfg |> | ./<lock>
@@ -0,0 +1,14 @@
version 1.

FileSystemUsage = <file-system-usage @path string @size int>.

# This assertion publishes a dataspace that proxies assertions with
# an exception for <Observe …> which is pulsed every periodSec.
# The pulse resolution is no more than one millisecond.
Pulse = <pulse @periodSec float @proxy #:any>.

XmlTranslation = <xml-translation @xml string @pr any>.

XsltTransform = <xslt-transform @stylesheet string @input string @output any>.
XsltItems = [XsltItem ...].
XsltItem = string.
@@ -0,0 +1,4 @@
version 1.

Base64Text = <base64 @txt string @bin bytes> .
Base64File = <base64-file @txt string @path string @size int> .
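These records pair base64 text with its decoded bytes (or a decoded file on disk). Given a dataspace handed to the decoder via `<base64-decoder { dataspace: $ds }>` (see `Base64DecoderArguments` in config.prs below), observing one field should yield the other; a hypothetical sketch:

```
# Hypothetical usage sketch: decode "aGVsbG8=" ("hello")
$ds ? <base64 "aGVsbG8=" ?bin> [
  $log ! <log "-" { decoded: $bin }>
]
```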
config.prs (55 lines changed)
@@ -1,37 +1,76 @@
version 1 .
embeddedType EntityRef.Cap .

Base64DecoderArguments = <base64-decoder {
  dataspace: #:any
}>.

CacheArguments = <cache {
  dataspace: #!any
  dataspace: #:any
  lifetime: float
}>.

FileSystemUsageArguments = <file-system-usage {
  dataspace: #:any
}>.

JsonTranslatorArguments = <json-stdio-translator {
  argv: [string ...]
  dataspace: #!any
  dataspace: #:any
}>.

JsonTranslatorConnected = <connected @path string>.
JsonTranslatorConnected = <connected @address SocketAddress>.

TcpAddress = <tcp @host string @port int>.
UnixAddress = <unix @path string>.

SocketAddress = TcpAddress / UnixAddress .

HttpClientArguments = <http-client {
  dataspace: #:any
}>.

HttpDriverArguments = <http-driver {
  dataspace: #:any
}>.

JsonSocketTranslatorArguments = <json-socket-translator {
  dataspace: #!any
  socket: string
  dataspace: #:any
  socket: SocketAddress
}>.

PostgreArguments = <postgre {
  connection: [PostgreConnectionParameter ...]
  dataspace: #:any
}>.
PostgreConnectionParameter = [@key string @val string].

PulseArguments = <pulse {
  dataspace: #:any
}>.

SqliteArguments = <sqlite {
  database: string
  dataspace: #!any
  dataspace: #:any
}>.

WebhooksArguments = <webhooks {
  endpoints: {[string ...]: #!any ...:...}
  endpoints: {[string ...]: #:any ...:...}
  listen: Tcp
}>.

WebsocketArguments = <websocket {
  dataspace: #!any
  dataspace: #:any
  url: string
}>.

XmlTranslatorArguments = <xml-translator {
  dataspace: #:any
}>.

XsltArguments = <xslt {
  dataspace: #:any
}>.

# Reused from syndicate-protocols/transportAddress
Tcp = <tcp @host string @port int>.
@@ -0,0 +1,34 @@
{
  pkgs ? import <nixpkgs> { },
}:

let
  inherit (pkgs)
    lib
    buildNimPackage
    fetchFromGitea
    libxml2
    libxslt
    openssl
    pkg-config
    postgresql
    sqlite
    ;
in

buildNimPackage {
  pname = "syndicate_utils";
  version = "unstable";

  src = if lib.inNixShell then null else lib.cleanSource ./.;

  buildInputs = [
    postgresql.out
    sqlite
    libxml2
    libxslt
    openssl
  ];

  lockFile = ./lock.json;
}
lock.json (159 lines changed)
@@ -3,14 +3,24 @@
    {
      "method": "fetchzip",
      "packages": [
        "bigints"
        "cps"
      ],
      "path": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source",
      "ref": "20231006",
      "rev": "86ea14d31eea9275e1408ca34e6bfe9c99989a96",
      "sha256": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4",
      "path": "/nix/store/8gbhwni0akqskdb3qhn5nfgv6gkdz0vz-source",
      "rev": "c90530ac57f98a842b7be969115c6ef08bdcc564",
      "sha256": "0h8ghs2fqg68j3jdcg7grnxssmllmgg99kym2w0a3vlwca1zvr62",
      "srcDir": "",
      "url": "https://github.com/ehmry/cps/archive/c90530ac57f98a842b7be969115c6ef08bdcc564.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "getdns"
      ],
      "path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
      "rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
      "sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
      "srcDir": "src",
      "url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
      "url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
    },
    {
      "method": "fetchzip",
@@ -23,77 +33,110 @@
      "srcDir": "",
      "url": "https://github.com/ehmry/hashlib/archive/f9455d4be988e14e3dc7933eb7cc7d7c4820b7ac.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "illwill"
      ],
      "path": "/nix/store/3lmm3z36qn4gz7bfa209zv0pqrpm3di9-source",
      "ref": "v0.3.2",
      "rev": "1d12cb36ab7b76c31d2d25fa421013ecb382e625",
      "sha256": "0f9yncl5gbdja18mrqf5ixrdgrh95k0khda923dm1jd1x1b7ar8z",
      "srcDir": "",
      "url": "https://github.com/johnnovak/illwill/archive/1d12cb36ab7b76c31d2d25fa421013ecb382e625.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "nimcrypto"
      ],
      "path": "/nix/store/zyr8zwh7vaiycn1s4r8cxwc71f2k5l0h-source",
      "ref": "traditional-api",
      "rev": "602c5d20c69c76137201b5d41f788f72afb95aa8",
      "sha256": "1dmdmgb6b9m5f8dyxk781nnd61dsk3hdxqks7idk9ncnpj9fng65",
      "srcDir": "",
      "url": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "npeg"
      ],
      "path": "/nix/store/ffkxmjmigfs7zhhiiqm0iw2c34smyciy-source",
      "ref": "1.2.1",
      "rev": "26d62fdc40feb84c6533956dc11d5ee9ea9b6c09",
      "sha256": "0xpzifjkfp49w76qmaylan8q181bs45anmp46l4bwr3lkrr7bpwh",
      "srcDir": "src",
      "url": "https://github.com/zevv/npeg/archive/26d62fdc40feb84c6533956dc11d5ee9ea9b6c09.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "preserves"
      ],
      "path": "/nix/store/fpkhfxnfbdcri6k7mac21r3byg738bs4-source",
      "ref": "20240108",
      "rev": "a01ba8c96d65f670862ba074bf82b50cbda6ed99",
      "sha256": "0n8pghy2qfywx0psr54yzjvhdhi5av204150jyyzfxhigczd8sr4",
      "path": "/nix/store/hzb7af7lbd4kgd5y4hbgxv1lswig36yj-source",
      "rev": "fd498c6457cb9ad2f3179daa40da69eec00326dd",
      "sha256": "182xvw04vjw83mlcrkwkip29b44h0v8dapg2014k9011h90mdsj4",
      "srcDir": "src",
      "url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/a01ba8c96d65f670862ba074bf82b50cbda6ed99.tar.gz"
      "url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/fd498c6457cb9ad2f3179daa40da69eec00326dd.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "stew"
      ],
      "path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
      "rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
      "sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
      "srcDir": "",
      "url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "syndicate"
      ],
      "path": "/nix/store/hma19sff6k2bi6qj01yscbynz6x2zvxj-source",
      "ref": "20240108",
      "rev": "3e11884a916c0452c90128c29940856e2d347cb7",
      "sha256": "0n1gbwllwwilz9fp5zyp4054vzcq1p7ddzg02sw8d0vqb1wmpsqm",
      "path": "/nix/store/dw30cq9gxz3353zgaq4a36ajq6chvbwc-source",
      "rev": "3a4dc1f13392830b587138199643d30fdbec8541",
      "sha256": "1mbd17rjm1fsx7d0ckzyjih2nzdjqs52ck9wscqcg9nvf3ib5mvh",
      "srcDir": "src",
      "url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/3e11884a916c0452c90128c29940856e2d347cb7.tar.gz"
      "url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/3a4dc1f13392830b587138199643d30fdbec8541.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "ws"
        "sys"
      ],
      "path": "/nix/store/zd51j4dphs6h1hyhdbzdv840c8813ai8-source",
      "ref": "0.5.0",
      "rev": "9536bf99ddf5948db221ccb7bb3663aa238a8e21",
      "sha256": "0j8z9jlvzb1h60v7rryvh2wx6vg99lra6i62whf3fknc53l641fz",
      "path": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source",
      "rev": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
      "sha256": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q",
      "srcDir": "src",
      "url": "https://github.com/treeform/ws/archive/9536bf99ddf5948db221ccb7bb3663aa238a8e21.tar.gz"
      "url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "taps"
      ],
      "path": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source",
      "rev": "8c8572cd971d1283e6621006b310993c632da247",
      "sha256": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8",
      "srcDir": "src",
      "url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
    },
    {
      "date": "2024-04-02T15:38:57+01:00",
      "deepClone": false,
      "fetchLFS": false,
      "fetchSubmodules": true,
      "hash": "sha256-iZb9aAgYr4FGkqfIg49QWiCqeizIi047kFhugHiP8o0=",
      "leaveDotGit": false,
      "method": "git",
      "packages": [
        "solo5_dispatcher"
      ],
      "path": "/nix/store/sf5dgj2ljvahcm6my7d61ibda51vnrii-solo5_dispatcher",
      "rev": "a7a894a96a2221284012800e6fd32923d83d20bd",
      "sha256": "13gjixw80vjqj0xlx2y85ixal82sa27q7j57j9383bqq11lgv5l9",
      "srcDir": "pkg",
      "url": "https://git.sr.ht/~ehmry/solo5_dispatcher"
    },
    {
      "method": "fetchzip",
      "packages": [
        "bigints"
      ],
      "path": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source",
      "rev": "86ea14d31eea9275e1408ca34e6bfe9c99989a96",
      "sha256": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4",
      "srcDir": "src",
      "url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "nimcrypto"
      ],
      "path": "/nix/store/h7lgq3by9mx8in03vzh0y964lnnlkalp-source",
      "rev": "ff6afc6a753bd645cad4568472c7733d1715e31e",
      "sha256": "0h9vpayp66pg66114bl0nsvlv1nzp7f0x5b35gbsbd7svzlcz5zj",
      "srcDir": "",
      "url": "https://github.com/cheatfate/nimcrypto/archive/ff6afc6a753bd645cad4568472c7733d1715e31e.tar.gz"
    },
    {
      "method": "fetchzip",
      "packages": [
        "npeg"
      ],
      "path": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source",
      "rev": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d",
      "sha256": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg",
      "srcDir": "src",
      "url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
    }
  ]
}
@@ -1,7 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:

pkgs.buildNimPackage {
  name = "dummy";
  propagatedNativeBuildInputs = [ pkgs.pkg-config ];
  propagatedBuildInputs = [ pkgs.sqlite ];
}
sql.prs (10 lines changed)
@@ -1,6 +1,8 @@
version 1 .

# When asserted the actor responds with
# rows as records of the given label and
# row columns as record fields.
Query = <query @label any @statement string> .
# When asserted the actor responds to @target rows as records
# of the given label and row columns as record fields.
Query = <query @statement [any ...] @target #:any> .

# When a query fails this is asserted instead.
SqlError = <sql-error @msg string @context string>.
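Tying the schema to the README examples above: a `query` is asserted with a statement and a target capability, and failures arrive as `sql-error` records. A sketch reusing names from the SQLite example (note the README's SQLite example spells the error record `sqlite-error`):

```
# Hypothetical usage sketch
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
$tuplespace ? <sql-error ?msg ?ctx> [
  $log ! <log "-" { msg: $msg ctx: $ctx }>
]
```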
@@ -1,4 +1,5 @@
include_rules
: foreach *.nim | $(SYNDICATE_PROTOCOL) ./<schema> |> !nim_bin |> {bin}
: foreach {bin} |> !assert_built |>
: $(BIN_DIR)/msg |> cp %f %o |> $(BIN_DIR)/beep
: $(BIN_DIR)/msg |> !symlink |> $(BIN_DIR)/beep
: $(BIN_DIR)/msg |> !symlink |> $(BIN_DIR)/assert
@@ -0,0 +1,91 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

# TODO: write a TAPS HTTP client. Figure out how to externalise TLS.

import
  std/[httpclient, options, streams, strutils, tables, uri],
  pkg/taps,
  pkg/preserves,
  pkg/syndicate, pkg/syndicate/protocols/http,
  ./schema/config

proc url(req: HttpRequest): Uri =
  ## Rebuild a Uri from an HttpRequest; HTTPS is assumed unless the port is 80.
  result.scheme = if req.port == 80: "http" else: "https"
  result.hostname = req.host.present
  result.port = $req.port
  for i, p in req.path:
    if 0 < i: result.path.add '/'
    result.path.add p.encodeUrl
  for key, vals in req.query:
    if result.query.len > 0:
      result.query.add '&'
    result.query.add key.string.encodeUrl
    for i, val in vals:
      if i == 0: result.query.add '='
      elif i < vals.high: result.query.add ','
      result.query.add val.string.encodeUrl

proc bodyString(req: HttpRequest): string =
  ## Extract the request body as a string, if present.
  if req.body.orKind == RequestBodyKind.present:
    return cast[string](req.body.present)

proc spawnHttpClient*(turn: Turn; root: Cap): Actor {.discardable.} =
  during(turn, root, ?:HttpClientArguments) do (ds: Cap):
    spawn("http-client", turn) do (turn: Turn):
      during(turn, ds, HttpContext.grabType) do (ctx: HttpContext):
        let peer = ctx.res.unembed(Cap).get
        var client = newHttpClient()
        try:
          var
            headers = newHttpHeaders()
            contentType = ""
          for key, val in ctx.req.headers:
            if key == Symbol"Content-Type":
              contentType = val
            client.headers[key.string] = val
          let stdRes = client.request(
            ctx.req.url,
            ctx.req.method.string.toUpper,
            ctx.req.bodyString, headers
          )
          var resp = HttpResponse(orKind: HttpResponseKind.status)
          resp.status.code = stdRes.status[0 .. 2].parseInt
          resp.status.message = stdRes.status[3 .. ^1]
          message(turn, peer, resp)
          resp = HttpResponse(orKind: HttpResponseKind.header)
          for key, val in stdRes.headers:
            if key == "Content-Type":
              contentType = val
            resp.header.name = key.Symbol
            resp.header.value = val
            message(turn, peer, resp)
          case contentType
          of "application/json", "text/preserves":
            message(turn, peer,
              initRecord("done", stdRes.bodyStream.readAll.parsePreserves))
          of "application/preserves":
            message(turn, peer,
              initRecord("done", stdRes.bodyStream.decodePreserves))
          else:
            resp = HttpResponse(orKind: HttpResponseKind.done)
            resp.done.chunk.string = stdRes.bodyStream.readAll()
            message(turn, peer, resp)
        except CatchableError as err:
          var resp = HttpResponse(orKind: HttpResponseKind.status)
          resp.status.code = 400
          resp.status.message = "Internal client error"
          message(turn, peer, resp)
          resp = HttpResponse(orKind: HttpResponseKind.done)
          resp.done.chunk.string = err.msg
          message(turn, peer, resp)
        client.close()
      do:
        client.close()

when isMainModule:
  import syndicate/relays
  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      spawnHttpClient(turn, ds)
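The one-line cfg that follows presumably exists to build this module with TLS support: Nim's std httpclient needs `-d:ssl` to speak HTTPS, which this client assumes for any port other than 80 (my reading of the change; the diff itself does not state the motivation).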
@@ -0,0 +1 @@
define:ssl
@@ -1,114 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

## An actor for filesystem monitoring.

import std/[asyncdispatch, asyncfile, tables]
import posix, posix/inotify
import preserves
import syndicate, syndicate/[bags, relays]
import ./schema/inotify_actor

var IN_NONBLOCK {.importc, nodecl.}: cint

type
  BootArgs {.preservesDictionary.} = object
    dataspace: Cap

proc toMask(sym: Symbol): uint32 =
  case sym.string
  of "IN_ACCESS": IN_ACCESS
  of "IN_MODIFY": IN_MODIFY
  of "IN_ATTRIB": IN_ATTRIB
  of "IN_CLOSE_WRITE": IN_CLOSE_WRITE
  of "IN_CLOSE_NOWRITE": IN_CLOSE_NOWRITE
  of "IN_CLOSE": IN_CLOSE
  of "IN_OPEN": IN_OPEN
  of "IN_MOVED_FROM": IN_MOVED_FROM
  of "IN_MOVED_TO": IN_MOVED_TO
  of "IN_MOVE": IN_MOVE
  of "IN_CREATE": IN_CREATE
  of "IN_DELETE": IN_DELETE
  of "IN_DELETE_SELF": IN_DELETE_SELF
  of "IN_MOVE_SELF": IN_MOVE_SELF
  else: 0

func contains(event, bit: uint32): bool = (event and bit) != 0

iterator symbols(event: uint32): Symbol =
  if event.contains IN_ACCESS:
    yield Symbol"IN_ACCESS"
  if event.contains IN_MODIFY:
    yield Symbol"IN_MODIFY"
  if event.contains IN_ATTRIB:
    yield Symbol"IN_ATTRIB"
  if event.contains IN_CLOSE_WRITE:
    yield Symbol"IN_CLOSE_WRITE"
  if event.contains IN_CLOSE_NOWRITE:
    yield Symbol"IN_CLOSE_NOWRITE"
  if event.contains IN_OPEN:
    yield Symbol"IN_OPEN"
  if event.contains IN_MOVED_FROM:
    yield Symbol"IN_MOVED_FROM"
  if event.contains IN_MOVED_TO:
    yield Symbol"IN_MOVED_TO"
  if event.contains IN_CREATE:
    yield Symbol"IN_CREATE"
  if event.contains IN_DELETE:
    yield Symbol"IN_DELETE"
  if event.contains IN_DELETE_SELF:
    yield Symbol"IN_DELETE_SELF"
  if event.contains IN_MOVE_SELF:
    yield Symbol"IN_MOVE_SELF"
  if event.contains (IN_CLOSE_WRITE or IN_CLOSE_NOWRITE):
    yield Symbol"IN_CLOSE"
  if event.contains (IN_MOVED_FROM or IN_MOVED_TO):
    yield Symbol"IN_MOVE"

runActor("inotify_actor") do (root: Cap; turn: var Turn):
  let buf = newSeq[byte](8192)
  let eventPattern = ?Observe(pattern: !InotifyMessage) ?? { 0: grabLit(), 1: grabLit() }
  connectStdio(turn, root)
  during(turn, root, ?:BootArgs) do (ds: Cap):
    let inf = inotify_init1(IN_NONBLOCK)
    doAssert inf != -1, $inf & " - " & $strerror(errno)
    var
      registry = initTable[cint, string]()
      watchBag: Bag[cint]
    let
      anf = newAsyncFile(AsyncFD inf)
      facet = turn.facet
    var fut: Future[int]
    proc readEvents() {.gcsafe.} =
      fut = readBuffer(anf, buf[0].addr, buf.len)
      addCallback(fut, facet) do (turn: var Turn):
        let n = read(fut)
        doAssert n > 0
        for event in inotify_events(buf[0].addr, n):
          var msg = InotifyMessage(path: registry[event.wd], cookie: event.cookie.BiggestInt)
          if event.len > 0:
            let n = event.len
            msg.name.setLen(n)
            copyMem(msg.name[0].addr, event.name.addr, n)
            for i, c in msg.name:
              if c == '\0':
                msg.name.setLen(i)
                break
          for sym in event.mask.symbols:
            msg.event = sym
            message(turn, ds, msg)
        readEvents()
    readEvents()

    during(turn, ds, eventPattern) do (path: string, kind: Symbol):
      let wd = inotify_add_watch(inf, path, kind.toMask or IN_MASK_ADD)
      doAssert wd > 0, $strerror(errno)
      registry[wd] = path
      discard watchBag.change(wd, 1)
    do:
      if watchBag.change(wd, -1, clamp = true) == cdPresentToAbsent:
        discard close(wd)
        registry.del(wd)
  do:
    close(anf)
@@ -7,8 +7,8 @@ when not defined(linux):
  {.error: "this component only tested for Linux".}

import std/oserrors
import preserves
import syndicate, syndicate/relays
import preserves, preserves/sugar
import syndicate
import ./schema/mountpoints

type BootArgs {.preservesDictionary.} = object

@@ -20,25 +20,34 @@ proc mount(source, target, fsType: cstring; flags: culong; data: pointer): cint
proc umount(target: cstring): cint {.importc, header: "<sys/mount.h>".}
  ## `umount(2)`

runActor("mount_actor") do (turn: var Turn; root: Cap):
  let
    targetPat = ?Observe(pattern: !Mountpoint) ?? { 1: grabLit() }
    sourcePat = ?Observe(pattern: !Mountpoint) ?? { 0: grabLit(), 2: grabLit() }
  connectStdio(turn, root)
  during(turn, root, ?:BootArgs) do (ds: Cap):
    during(turn, ds, targetPat) do (target: string):
      during(turn, ds, sourcePat) do (source: string, fsType: string):
        var mountpoint = Mountpoint(
          source: source,
          target: target,
          `type`: fsType,
        )
        var rc = mount(source, target, fsType, 0, nil)
        if rc == 0:
          mountpoint.status = Status(orKind: StatusKind.success)
        else:
          mountpoint.status = Status(orKind: StatusKind.Failure)
          mountpoint.status.failure.msg = osErrorMsg(osLastError())
        discard publish(turn, ds, mountpoint)
      do:
        discard umount(target)
proc spawnMountActor*(turn: Turn; ds: Cap): Actor {.discardable.} =
  spawnActor(turn, "mount_actor") do (turn: Turn):
    let
      targetPat = observePattern(!Mountpoint, { @[%1]: grabLit() })
      sourcePat = observePattern(!Mountpoint, {
        @[%0]: grabLit(),
        @[%2]: grabLit(),
      })
    during(turn, ds, ?:BootArgs) do (ds: Cap):
      during(turn, ds, targetPat) do (target: string):
        during(turn, ds, sourcePat) do (source: string, fsType: string):
          var mountpoint = Mountpoint(
            source: source,
            target: target,
            `type`: fsType,
          )
          var rc = mount(source, target, fsType, 0, nil)
          if rc == 0:
            mountpoint.status = Status(orKind: StatusKind.success)
          else:
            mountpoint.status = Status(orKind: StatusKind.Failure)
            mountpoint.status.failure.msg = osErrorMsg(osLastError())
          discard publish(turn, ds, mountpoint)
        do:
          discard umount(target)

when isMainModule:
  import syndicate/relays
  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      discard spawnMountActor(turn, ds)
src/msg.nim (25 lines changed)
@@ -1,19 +1,22 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[asyncdispatch, sequtils, os]
import std/[sequtils, os, strutils]
import preserves, syndicate, syndicate/relays

proc main =
  let
    route = envRoute()
    data = map(commandLineParams(), parsePreserves)
  setControlCHook(proc () {.noconv.} = quit())

  discard bootDataspace("msg") do (turn: var Turn; root: Cap):
    resolve(turn, root, route) do (turn: var Turn; ds: Cap):
runActor("msg") do (turn: Turn):
  let
    data = map(commandLineParams(), parsePreserves)
    cmd = paramStr(0).extractFilename.normalize
  resolveEnvironment(turn) do (turn: Turn; ds: Cap):
    case cmd
    of "assert":
      for e in data:
        publish(turn, ds, e)
    else: # "msg"
      for e in data:
        message(turn, ds, e)

    for _ in 1..4: poll()

main()
    sync(turn, ds) do (turn: Turn):
      stopActor(turn)
@@ -1,167 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

## A ping utility for Syndicate.

import std/[asyncdispatch, asyncnet, monotimes, nativesockets, net, os, strutils, tables, times]
import preserves
import syndicate, syndicate/relays

import ./schema/net_mapper

#[
var
  SOL_IP {.importc, nodecl, header: "<sys/socket.h>".}: int
  IP_TTL {.importc, nodecl, header: "<netinet/in.h>".}: int
]#

proc toPreservesHook(address: IpAddress): Value = toPreserves($address)

proc fromPreservesHook(address: var IpAddress; pr: Value): bool =
  try:
    if pr.isString:
      address = parseIpAddress(pr.string)
      result = true
  except ValueError: discard

when isMainModule:
  # verify that the hook catches
  var ip: IpAddress
  assert fromPreservesHook(ip, toPreservesHook(ip))

type
  IcmpHeader {.packed.} = object
    `type`: uint8
    code: uint8
    checksum: uint16

  IcmpEchoFields {.packed.} = object
    header: IcmpHeader
    identifier: array[2, byte]
    sequenceNumber: uint16

  IcmpEcho {.union.} = object
    fields: IcmpEchoFields
    buffer: array[8, uint8]

  IcmpTypes = enum
    icmpEchoReply = 0,
    icmpEcho = 8,

proc initIcmpEcho(): IcmpEcho =
  result.fields.header.`type` = uint8 icmpEcho
  # doAssert urandom(result.fields.identifier) # Linux does this?

proc updateChecksum(msg: var IcmpEcho) =
  var sum: uint32
  msg.fields.header.checksum = 0
  for n in cast[array[4, uint16]](msg.buffer): sum = sum + uint32(n)
  while (sum and 0xffff0000'u32) != 0:
    sum = (sum and 0xffff) + (sum shr 16)
  msg.fields.header.checksum = not uint16(sum)

proc match(a, b: IcmpEchoFields): bool =
  ({a.header.type, b.header.type} == {uint8 icmpEcho, uint8 icmpEchoReply}) and
    (a.header.code == b.header.code) and
    (a.sequenceNumber == b.sequenceNumber)

type
  Pinger = ref object
    facet: Facet
    ds: Cap
    rtt: RoundTripTime
    rttHandle: Handle
    sum: Duration
    count: int64
    msg: IcmpEcho
    socket: AsyncSocket
    sad: Sockaddr_storage
    sadLen: SockLen
    interval: Duration

proc newPinger(address: IpAddress; facet: Facet; ds: Cap): Pinger =
  result = Pinger(
    facet: facet,
    ds: ds,
    rtt: RoundTripTime(address: $address),
    msg: initIcmpEcho(),
    socket: newAsyncSocket(AF_INET, SOCK_DGRAM, IPPROTO_ICMP, false, true),
    interval: initDuration(milliseconds = 500))
  toSockAddr(address, Port 0, result.sad, result.sadLen)
  # setSockOptInt(getFd socket, SOL_IP, IP_TTL, _)

proc close(ping: Pinger) = close(ping.socket)

proc sqr(dur: Duration): Duration =
  let us = dur.inMicroseconds
  initDuration(microseconds = us * us)

proc update(ping: Pinger; dur: Duration) {.inline.} =
  let secs = dur.inMicroseconds.float / 1_000_000.0
  if ping.count == 0: (ping.rtt.minimum, ping.rtt.maximum) = (secs, secs)
  elif secs < ping.rtt.minimum: ping.rtt.minimum = secs
  elif secs > ping.rtt.maximum: ping.rtt.maximum = secs
  ping.sum = ping.sum + dur
  inc ping.count
  ping.rtt.average = inMicroseconds(ping.sum div ping.count).float / 1_000_000.0

proc exchangeEcho(ping: Pinger) {.async.} =
  inc ping.msg.fields.sequenceNumber
  # updateChecksum(ping.msg) # Linux does this?
  let
    a = getMonoTime()
    r = sendto(ping.socket.getFd,
      unsafeAddr ping.msg.buffer[0], ping.msg.buffer.len, 0,
      cast[ptr SockAddr](unsafeAddr ping.sad), # neckbeard loser API
      ping.sadLen)
  if r == -1'i32:
    let osError = osLastError()
    raiseOSError(osError)
  while true:
    var
      (data, address, _) = await recvFrom(ping.socket, 128)
      b = getMonoTime()
    if address != $ping.rtt.address:
      stderr.writeLine "want ICMP from ", ping.rtt.address, " but received from ", address, " instead"
    elif data.len >= ping.msg.buffer.len:
      let
        period = b - a
        resp = cast[ptr IcmpEcho](unsafeAddr data[0])
      if match(ping.msg.fields, resp.fields):
        update(ping, period)
        return
      else:
        stderr.writeLine "ICMP mismatch"
    else:
      stderr.writeLine "reply data has a bad length ", data.len

proc kick(ping: Pinger) {.gcsafe.} =
  if not ping.socket.isClosed:
    addTimer(ping.interval.inMilliseconds.int, oneshot = true) do (fd: AsyncFD) -> bool:
      let fut = exchangeEcho(ping)
      fut.addCallback do ():
        if fut.failed and ping.rttHandle != Handle(0):
          ping.facet.run do (turn: var Turn):
            retract(turn, ping.rttHandle)
            reset ping.rttHandle
        else:
          ping.facet.run do (turn: var Turn):
            replace(turn, ping.ds, ping.rttHandle, ping.rtt)
          if ping.interval < initDuration(seconds = 20):
            ping.interval = ping.interval * 2
        kick(ping)

type Args {.preservesDictionary.} = object
  dataspace: Cap

runActor("net_mapper") do (root: Cap; turn: var Turn):
  connectStdio(turn, root)
  let rttObserver = ?Observe(pattern: !RoundTripTime) ?? {0: grabLit()}
  during(turn, root, ?:Args) do (ds: Cap):
    during(turn, ds, rttObserver) do (address: IpAddress):
      var ping: Pinger
      if address.family == IpAddressFamily.IPv4:
        ping = newPinger(address, turn.facet, ds)
        kick(ping)
    do:
      if not ping.isNil: close(ping)
@@ -0,0 +1,158 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import preserves, syndicate
import ./schema/[config, sql]

{.passL: "-lpq".}

{.pragma: libpq, header: "libpq-fe.h", importc.}

type
  Oid = cuint
  PGconn {.libpq.} = ptr object
  PGresult {.libpq.} = ptr object
  ConnStatusType {.libpq.} = enum
    CONNECTION_OK, CONNECTION_BAD, ## Non-blocking mode only below here
      ##
      ## The existence of these should never be relied upon - they should only
      ## be used for user feedback or similar purposes.
      ##
    CONNECTION_STARTED,           ## Waiting for connection to be made.
    CONNECTION_MADE,              ## Connection OK; waiting to send.
    CONNECTION_AWAITING_RESPONSE, ## Waiting for a response from the
                                  ## postmaster.
    CONNECTION_AUTH_OK,           ## Received authentication; waiting for
                                  ## backend startup.
    CONNECTION_SETENV,            ## This state is no longer used.
    CONNECTION_SSL_STARTUP,       ## Negotiating SSL.
    CONNECTION_NEEDED,            ## Internal state: connect() needed
    CONNECTION_CHECK_WRITABLE,    ## Checking if session is read-write.
    CONNECTION_CONSUME,           ## Consuming any extra messages.
    CONNECTION_GSS_STARTUP,       ## Negotiating GSSAPI.
    CONNECTION_CHECK_TARGET,      ## Checking target server properties.
    CONNECTION_CHECK_STANDBY      ## Checking if server is in standby mode.

  ExecStatusType = enum
    PGRES_EMPTY_QUERY = 0,  ## empty query string was executed
    PGRES_COMMAND_OK,       ## a query command that doesn't return
                            ## anything was executed properly by the
                            ## backend
    PGRES_TUPLES_OK,        ## a query command that returns tuples was
                            ## executed properly by the backend, PGresult
                            ## contains the result tuples
    PGRES_COPY_OUT,         ## Copy Out data transfer in progress
    PGRES_COPY_IN,          ## Copy In data transfer in progress
    PGRES_BAD_RESPONSE,     ## an unexpected response was recv'd from the
                            ## backend
    PGRES_NONFATAL_ERROR,   ## notice or warning message
    PGRES_FATAL_ERROR,      ## query failed
    PGRES_COPY_BOTH,        ## Copy In/Out data transfer in progress
    PGRES_SINGLE_TUPLE,     ## single tuple from larger resultset
    PGRES_PIPELINE_SYNC,    ## pipeline synchronization point
    PGRES_PIPELINE_ABORTED  ## Command didn't run because of an abort
                            ## earlier in a pipeline

proc PQconnectdbParams(
  keywords: cstringArray; values: cstringArray; expand_dbname: cint): PGconn {.libpq.}

proc PQerrorMessage(conn: PGconn): cstring {.libpq.}
proc PQfinish(conn: PGconn) {.libpq.}
proc PQstatus(conn: PGconn): ConnStatusType {.libpq.}
proc PQexec(conn: PGconn; query: cstring): PGresult {.libpq.}
proc PQresultStatus(res: PGresult): ExecStatusType {.libpq.}
proc PQresStatus(status: ExecStatusType): cstring {.libpq.}
proc PQresultErrorMessage(res: PGresult): cstring {.libpq.}
proc PQclear(res: PGresult) {.libpq.}
proc PQntuples(res: PGresult): cint {.libpq.}
proc PQnfields(res: PGresult): cint {.libpq.}
proc PQgetvalue(res: PGresult; tup_num: cint; field_num: cint): cstring {.libpq.}
proc PQftype(res: PGresult; field_num: cint): Oid {.libpq.}
proc PQfsize(res: PGresult; field_num: cint): cint {.libpq.}

# proc PQsocket(conn: PGconn): cint
# proc PQconnectStartParams(
#   keywords: cstringArray; values: cstringArray; expand_dbname: cint): PGconn
# TODO: async

proc checkPointer(p: pointer) =
  if p.isNil: raise newException(OutOfMemDefect, "Postgres returned nil")

type StringPairs = seq[tuple[key: string, val: string]]

proc splitParams(params: StringPairs): (cstringArray, cstringArray) =
  ## Split key/value pairs into the two parallel cstring arrays libpq expects.
  var strings = newSeq[string](params.len)
  for i, _ in params: strings[i] = params[i][0]
  result[0] = allocCStringArray(strings)
  for i, _ in params: strings[i] = params[i][1]
  result[1] = allocCStringArray(strings)

proc renderSql(tokens: openarray[Value]): string =
  ## Render Preserves tokens to SQL text; returns "" for unsupported token kinds.
  for token in tokens:
    if result.len > 0: result.add ' '
    case token.kind
    of pkSymbol:
      result.add token.symbol.string
    of pkString:
      result.add '\''
      result.add token.string
      result.add '\''
    of pkFloat, pkRegister, pkBigInt:
      result.add $token
    of pkBoolean:
      if token.bool: result.add '1'
      else: result.add '0'
    else:
      return ""

proc spawnPostgreActor*(turn: Turn; root: Cap): Actor {.discardable.} =
  spawn("postgre", turn) do (turn: Turn):
    during(turn, root, ?:PostgreArguments) do (params: StringPairs, ds: Cap):
      var
        conn: PGconn
        statusHandle: Handle
        (keys, vals) = splitParams(params)
      conn = PQconnectdbParams(keys, vals, 0)
      checkPointer(conn)
      let
        status = PQstatus(conn)
        msg = $PQerrorMessage(conn)
      statusHandle = publish(turn, ds,
        initRecord("status", toSymbol($status), msg.toPreserves))
      if status == CONNECTION_OK:
        during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
          var text = renderSql statement
          if text == "":
            discard publish(turn, ds, SqlError(msg: "invalid statement", context: $statement))
          else:
            var
              res = PQexec(conn, text)
              st = PQresultStatus(res)
            if st == PGRES_TUPLES_OK or st == PGRES_SINGLE_TUPLE:
              let tuples = PQntuples(res)
              let fields = PQnfields(res)
              if tuples > 0 and fields > 0:
                for r in 0..<tuples:
                  var tupl = initSequence(fields)
                  for f in 0..<fields:
                    tupl[f] = toPreserves($PQgetvalue(res, r, f))
                  discard publish(turn, target, tupl)
            else:
              discard publish(turn, ds, SqlError(
                msg: $PQresStatus(st),
                context: $PQresultErrorMessage(res),
              ))
            PQclear(res)
      else:
        stderr.writeLine "refusing to do anything when status is ", status
      do:
        deallocCStringArray(keys)
        deallocCStringArray(vals)
        PQfinish(conn)

when isMainModule:
  import syndicate/relays

  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      spawnPostgreActor(turn, ds)
@@ -3,40 +3,29 @@

## See the rofi-script(5) manpage for documentation.

import std/[asyncdispatch, cmdline, envvars, strutils, tables]
import std/[cmdline, envvars, strutils, tables]
import preserves, syndicate, syndicate/relays
import ./schema/rofi

proc main =
  let
    route = envRoute()
    rofiPid = getEnv("ROFI_OUTSIDE")
  if rofiPid == "":
    quit("run this program in rofi")
if getEnv("ROFI_OUTSIDE") == "":
  quit("run this program in rofi")

runActor("rofi_script_actor") do (turn: var Turn; root: Cap):
  let rootFacet = turn.facet
  resolve(turn, root, route) do (turn: var Turn; ds: Cap):
    case paramCount()
    of 0:
      let pat = ?:Options
      onPublish(turn, ds, pat) do (options: seq[string]):
        stdout.writeLine options.join("\n")
        quit()
runActor("rofi_script_actor") do (turn: Turn):
  resolveEnvironment(turn) do (turn: Turn; ds: Cap):
    case paramCount()
    of 0:
      let pat = ?:Options
      onPublish(turn, ds, pat) do (options: seq[string]):
        stdout.writeLine options.join("\n")
        quit()

    of 1:
      var select = Select(option: commandLineParams()[0])
      for (key, val) in envPairs():
        if key.startsWith "ROFI_":
          select.environment[Symbol key] = val
      message(turn, ds, select)
      # TODO: sync not implemented correctly
      # sync(turn, ds, stopActor)
      callSoon do ():
        waitFor sleepAsync(1)
        quit()
    of 1:
      var select = Select(option: commandLineParams()[0])
      for (key, val) in envPairs():
        if key.startsWith "ROFI_":
          select.environment[Symbol key] = val
      message(turn, ds, select)
      sync(turn, ds, stopActor)

    else:
      quit("rofi passed an unexpected number of arguments")

main()
    else:
      quit("rofi passed an unexpected number of arguments")
@@ -0,0 +1,31 @@

import
  preserves

type
  XsltItems* = seq[XsltItem]
  Pulse* {.preservesRecord: "pulse".} = object
    `periodSec`*: float
    `proxy`* {.preservesEmbedded.}: Value

  XsltItem* = string
  XmlTranslation* {.preservesRecord: "xml-translation".} = object
    `xml`*: string
    `pr`*: Value

  FileSystemUsage* {.preservesRecord: "file-system-usage".} = object
    `path`*: string
    `size`*: BiggestInt

  XsltTransform* {.preservesRecord: "xslt-transform".} = object
    `stylesheet`*: string
    `input`*: string
    `output`*: Value

proc `$`*(x: XsltItems | Pulse | XsltItem | XmlTranslation | FileSystemUsage |
    XsltTransform): string =
  `$`(toPreserves(x))

proc encode*(x: XsltItems | Pulse | XsltItem | XmlTranslation | FileSystemUsage |
    XsltTransform): seq[byte] =
  encode(toPreserves(x))
@@ -0,0 +1,19 @@

import
  preserves

type
  Base64File* {.preservesRecord: "base64-file".} = object
    `txt`*: string
    `path`*: string
    `size`*: BiggestInt

  Base64Text* {.preservesRecord: "base64".} = object
    `txt`*: string
    `bin`*: seq[byte]

proc `$`*(x: Base64File | Base64Text): string =
  `$`(toPreserves(x))

proc encode*(x: Base64File | Base64Text): seq[byte] =
  encode(toPreserves(x))
@@ -10,6 +10,12 @@ type
  WebsocketArguments* {.preservesRecord: "websocket".} = object
    `field0`*: WebsocketArgumentsField0

  HttpClientArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  HttpClientArguments* {.preservesRecord: "http-client".} = object
    `field0`*: HttpClientArgumentsField0

  JsonTranslatorArgumentsField0* {.preservesDictionary.} = object
    `argv`*: seq[string]
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

@@ -17,16 +23,45 @@ type
  JsonTranslatorArguments* {.preservesRecord: "json-stdio-translator".} = object
    `field0`*: JsonTranslatorArgumentsField0

  SocketAddressKind* {.pure.} = enum
    `TcpAddress`, `UnixAddress`
  `SocketAddress`* {.preservesOr.} = object
    case orKind*: SocketAddressKind
    of SocketAddressKind.`TcpAddress`:
      `tcpaddress`*: TcpAddress

    of SocketAddressKind.`UnixAddress`:
      `unixaddress`*: UnixAddress

  Base64DecoderArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  Base64DecoderArguments* {.preservesRecord: "base64-decoder".} = object
    `field0`*: Base64DecoderArgumentsField0

  JsonTranslatorConnected* {.preservesRecord: "connected".} = object
    `path`*: string
    `address`*: SocketAddress

  JsonSocketTranslatorArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef
    `socket`*: string
    `socket`*: SocketAddress

  JsonSocketTranslatorArguments* {.preservesRecord: "json-socket-translator".} = object
    `field0`*: JsonSocketTranslatorArgumentsField0

  XsltArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  XsltArguments* {.preservesRecord: "xslt".} = object
    `field0`*: XsltArgumentsField0

  HttpDriverArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  HttpDriverArguments* {.preservesRecord: "http-driver".} = object
    `field0`*: HttpDriverArgumentsField0

  WebhooksArgumentsField0* {.preservesDictionary.} = object
    `endpoints`*: Table[seq[string], EmbeddedRef]
    `listen`*: Tcp

@@ -34,6 +69,12 @@ type
  WebhooksArguments* {.preservesRecord: "webhooks".} = object
    `field0`*: WebhooksArgumentsField0

  FileSystemUsageArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  FileSystemUsageArguments* {.preservesRecord: "file-system-usage".} = object
    `field0`*: FileSystemUsageArgumentsField0

  SqliteArgumentsField0* {.preservesDictionary.} = object
    `database`*: string
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

@@ -41,31 +82,84 @@ type
  SqliteArguments* {.preservesRecord: "sqlite".} = object
    `field0`*: SqliteArgumentsField0

  TcpAddress* {.preservesRecord: "tcp".} = object
    `host`*: string
    `port`*: BiggestInt

  CacheArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef
    `lifetime`*: float32
    `lifetime`*: float

  CacheArguments* {.preservesRecord: "cache".} = object
    `field0`*: CacheArgumentsField0

  XmlTranslatorArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  XmlTranslatorArguments* {.preservesRecord: "xml-translator".} = object
    `field0`*: XmlTranslatorArgumentsField0

  PostgreConnectionParameter* {.preservesTuple.} = object
    `key`*: string
    `val`*: string

  PostgreArgumentsField0* {.preservesDictionary.} = object
    `connection`*: seq[PostgreConnectionParameter]
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  PostgreArguments* {.preservesRecord: "postgre".} = object
    `field0`*: PostgreArgumentsField0

  PulseArgumentsField0* {.preservesDictionary.} = object
    `dataspace`* {.preservesEmbedded.}: EmbeddedRef

  PulseArguments* {.preservesRecord: "pulse".} = object
    `field0`*: PulseArgumentsField0

  UnixAddress* {.preservesRecord: "unix".} = object
    `path`*: string

  Tcp* {.preservesRecord: "tcp".} = object
    `host`*: string
    `port`*: BiggestInt

proc `$`*(x: WebsocketArguments | JsonTranslatorArguments |
proc `$`*(x: WebsocketArguments | HttpClientArguments | JsonTranslatorArguments |
    SocketAddress |
    Base64DecoderArguments |
    JsonTranslatorConnected |
    JsonSocketTranslatorArguments |
    XsltArguments |
    HttpDriverArguments |
    WebhooksArguments |
    FileSystemUsageArguments |
    SqliteArguments |
    TcpAddress |
    CacheArguments |
    XmlTranslatorArguments |
    PostgreConnectionParameter |
    PostgreArguments |
    PulseArguments |
    UnixAddress |
    Tcp): string =
  `$`(toPreserves(x))

proc encode*(x: WebsocketArguments | JsonTranslatorArguments |
proc encode*(x: WebsocketArguments | HttpClientArguments |
    JsonTranslatorArguments |
    SocketAddress |
    Base64DecoderArguments |
    JsonTranslatorConnected |
    JsonSocketTranslatorArguments |
    XsltArguments |
    HttpDriverArguments |
    WebhooksArguments |
    FileSystemUsageArguments |
    SqliteArguments |
    TcpAddress |
    CacheArguments |
    XmlTranslatorArguments |
    PostgreConnectionParameter |
    PostgreArguments |
    PulseArguments |
    UnixAddress |
    Tcp): seq[byte] =
  encode(toPreserves(x))
|
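The `SocketAddress` union above is what lets a single `socket` field carry either a `<tcp …>` or a `<unix …>` record. A minimal round-trip sketch (the import path is an assumption; the type and proc names are taken from the generated code above):

```
import preserves
import ./schema/config  # assumed module path for the generated types above

# Build the <unix …> variant and print it via the generated `$`.
let sa = SocketAddress(
  orKind: SocketAddressKind.UnixAddress,
  unixaddress: UnixAddress(path: "/run/user/1000/mpv.sock"))
echo sa  # <unix "/run/user/1000/mpv.sock">

# Parse the <tcp …> variant back out of Preserves text.
var parsed: SocketAddress
doAssert parsed.fromPreserves(parsePreserves("""<tcp "localhost" 5432>"""))
doAssert parsed.orKind == SocketAddressKind.TcpAddress
```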
@ -5,9 +5,9 @@ import

type
  RoundTripTime* {.preservesRecord: "rtt".} = object
    `address`*: string
-   `minimum`*: float32
-   `average`*: float32
-   `maximum`*: float32
+   `minimum`*: float
+   `average`*: float
+   `maximum`*: float

proc `$`*(x: RoundTripTime): string =
  `$`(toPreserves(x))
@ -4,11 +4,15 @@ import

type
  Query* {.preservesRecord: "query".} = object
-   `label`*: Value
-   `statement`*: string
+   `statement`*: seq[Value]
+   `target`* {.preservesEmbedded.}: Value

+ SqlError* {.preservesRecord: "sql-error".} = object
+   `msg`*: string
+   `context`*: string

-proc `$`*(x: Query): string =
+proc `$`*(x: Query | SqlError): string =
  `$`(toPreserves(x))

-proc encode*(x: Query): seq[byte] =
+proc encode*(x: Query | SqlError): seq[byte] =
  encode(toPreserves(x))
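A query is no longer a flat SQL string: `statement` is now a sequence of Preserves values that the consumer renders back to SQL, and failures come back as `<sql-error msg context>` assertions. A sketch of building the new record, with a hypothetical `rows` value standing in for the embedded result capability:

```
import preserves
import ./schema/sql  # assumed module path for the generated types above

proc exampleQuery(rows: Value): Query =
  # `rows` is an embedded capability that will receive result tuples.
  # Symbols pass through as bare SQL tokens; strings are quoted by the consumer.
  Query(
    statement: @[
      "select".toSymbol, "*".toSymbol, "from".toSymbol, "books".toSymbol],
    target: rows)
```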
@ -0,0 +1,149 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import preserves, syndicate
import ./schema/[config, sql]

# Avoid Sqlite3 from the standard library because it is
# only held together by wishful thinking and dlload.

{.passC: staticExec("pkg-config --cflags sqlite3").}
{.passL: staticExec("pkg-config --libs sqlite3").}

{.pragma: sqlite3h, header: "sqlite3.h".}

var
  SQLITE_VERSION_NUMBER {.importc, sqlite3h.}: cint
  SQLITE_OK {.importc, sqlite3h.}: cint
  SQLITE_ROW {.importc, sqlite3h.}: cint
  SQLITE_DONE {.importc, sqlite3h.}: cint
  SQLITE_OPEN_READONLY {.importc, sqlite3h.}: cint

const
  SQLITE_INTEGER = 1
  SQLITE_FLOAT = 2
  SQLITE_TEXT = 3
  SQLITE_BLOB = 4
  # SQLITE_NULL = 5

type
  Sqlite3 {.importc: "sqlite3", sqlite3h.} = distinct pointer
  Stmt {.importc: "sqlite3_stmt", sqlite3h.} = distinct pointer

{.pragma: importSqlite3, importc: "sqlite3_$1", sqlite3h.}

proc libversion_number: cint {.importSqlite3.}

proc open_v2(filename: cstring; ppDb: ptr Sqlite3; flags: cint; zVfs: cstring): cint {.importSqlite3.}
proc close(ds: Sqlite3): int32 {.discardable, importSqlite3.}

proc errmsg(db: Sqlite3): cstring {.importSqlite3.}

proc prepare_v2(db: Sqlite3; zSql: cstring, nByte: cint; ppStmt: ptr Stmt; pzTail: ptr cstring): cint {.importSqlite3.}

proc step(para1: Stmt): cint {.importSqlite3.}

proc column_count(stmt: Stmt): int32 {.importSqlite3.}
proc column_blob(stmt: Stmt; col: cint): pointer {.importSqlite3.}
proc column_bytes(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc column_double(stmt: Stmt; col: cint): float64 {.importSqlite3.}
proc column_int64(stmt: Stmt; col: cint): int64 {.importSqlite3.}
proc column_text(stmt: Stmt; col: cint): cstring {.importSqlite3.}
proc column_type(stmt: Stmt; col: cint): cint {.importSqlite3.}
proc finalize(stmt: Stmt): cint {.importSqlite3.}

doAssert libversion_number() == SQLITE_VERSION_NUMBER

proc assertError(facet: Facet; cap: Cap; db: Sqlite3; context: string) =
  run(facet) do (turn: Turn):
    publish(turn, cap, SqlError(
      msg: $errmsg(db),
      context: context,
    ))

proc assertError(facet: Facet; cap: Cap; msg, context: string) =
  run(facet) do (turn: Turn):
    publish(turn, cap, SqlError(
      msg: msg,
      context: context,
    ))

proc extractValue(stmt: Stmt; col: cint): Value =
  case column_type(stmt, col)
  of SQLITE_INTEGER:
    result = toPreserves(column_int64(stmt, col))
  of SQLITE_FLOAT:
    result = toPreserves(column_double(stmt, col))
  of SQLITE_TEXT:
    result = Value(kind: pkString, string: newString(column_bytes(stmt, col)))
    if result.string.len > 0:
      copyMem(addr result.string[0], column_text(stmt, col), result.string.len)
  of SQLITE_BLOB:
    result = Value(kind: pkByteString, bytes: newSeq[byte](column_bytes(stmt, col)))
    if result.bytes.len > 0:
      copyMem(addr result.bytes[0], column_blob(stmt, col), result.bytes.len)
  else:
    result = initRecord("null")

proc extractTuple(stmt: Stmt; arity: cint): Value =
  result = initSequence(arity)
  for col in 0..<arity: result[col] = extractValue(stmt, col)

proc renderSql(tokens: openarray[Value]): string =
  for token in tokens:
    if result.len > 0: result.add ' '
    case token.kind
    of pkSymbol:
      result.add token.symbol.string
    of pkString:
      result.add '\''
      result.add token.string
      result.add '\''
    of pkFloat, pkRegister, pkBigInt:
      result.add $token
    of pkBoolean:
      if token.bool: result.add '1'
      else: result.add '0'
    else:
      return ""

proc spawnSqliteActor*(turn: Turn; root: Cap): Actor {.discardable.} =
  spawn("sqlite-actor", turn) do (turn: Turn):
    during(turn, root, ?:SqliteArguments) do (path: string, ds: Cap):
      linkActor(turn, path) do (turn: Turn):
        let facet = turn.facet
        stderr.writeLine("opening SQLite database ", path)
        var db: Sqlite3
        if open_v2(path, addr db, SQLITE_OPEN_READONLY, nil) != SQLITE_OK:
          assertError(facet, ds, db, path)
        else:
          turn.onStop do (turn: Turn):
            close(db)
            stderr.writeLine("closed SQLite database ", path)
          during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
            var
              stmt: Stmt
              text = renderSql statement
            if text == "":
              assertError(facet, target, "invalid statement", $statement)
            elif prepare_v2(db, text, text.len.cint, addr stmt, nil) != SQLITE_OK:
              assertError(facet, target, db, text)
            else:
              try:
                let arity = column_count(stmt)
                var res = step(stmt)
                while res == SQLITE_ROW:
                  var rec = extractTuple(stmt, arity)
                  discard publish(turn, target, rec)
                  res = step(stmt)
                assert res != 100
                if res != SQLITE_DONE:
                  assertError(facet, target, db, text)
              finally:
                if finalize(stmt) != SQLITE_OK: assertError(facet, target, db, text)

when isMainModule:
  import syndicate/relays

  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      spawnSqliteActor(turn, ds)
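Driving the actor above, sketched: assert a `<query …>` on the dataspace it serves and each result row arrives as a sequence value published to the embedded `target`. The helper below and its `books` table are hypothetical:

```
import preserves, syndicate
import ./schema/sql  # assumed module path

proc queryTitles(turn: Turn; ds: Cap) =
  # Collect row tuples on a private dataspace, then assert the query.
  let rows = newDataspace(turn)
  discard publish(turn, ds, Query(
    statement: @[
      "select".toSymbol, "title".toSymbol, "from".toSymbol, "books".toSymbol],
    target: embed rows))
```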
@ -3,23 +3,26 @@

## Syndicate multitool.

-import syndicate, syndicate/relays, syndicate/actors/timers
+import syndicate, syndicate/relays, syndicate/drivers/timers

import ./syndesizer/[
+ base64_decoder,
  cache_actor,
+ file_system_usage,
+ http_driver,
  json_socket_translator,
  json_translator,
- sqlite_actor,
- webhooks,
- websockets]
+ pulses,
+ xml_translator]

-runActor("syndesizer") do (turn: var Turn; root: Cap):
-  connectStdio(turn, root)
-  discard spawnTimers(turn, root)
-  discard spawnCacheActor(turn, root)
-  discard spawnJsonSocketTranslator(turn, root)
-  discard spawnJsonStdioTranslator(turn, root)
-  discard spawnWebhookActor(turn, root)
-  discard spawnWebsocketActor(turn, root)
-  when withSqlite:
-    discard spawnSqliteActor(turn, root)
+runActor("syndesizer") do (turn: Turn):
+  resolveEnvironment(turn) do (turn: Turn; ds: Cap):
+    discard spawnTimerDriver(turn, ds)
+    discard spawnBase64Decoder(turn, ds)
+    discard spawnCacheActor(turn, ds)
+    discard spawnFileSystemUsageActor(turn, ds)
+    discard spawnHttpDriver(turn, ds)
+    discard spawnJsonSocketTranslator(turn, ds)
+    discard spawnJsonStdioTranslator(turn, ds)
+    discard spawnPulseActor(turn, ds)
+    discard spawnXmlTranslator(turn, ds)
@ -0,0 +1,3 @@
include_rules
: foreach *.nim | $(SYNDICATE_PROTOCOL) ../<schema> |> !nim_bin |> {bin}
: foreach {bin} |> !assert_built |>
@ -0,0 +1,51 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[base64, os]
import pkg/hashlib/misc/blake2
import preserves, preserves/sugar, syndicate
import ../schema/config
import ../schema/base64 as schema

export Base64DecoderArguments
export schema

proc spawnBase64Decoder*(turn: Turn; root: Cap): Actor {.discardable.} =
  spawnActor(turn, "base64-decoder") do (turn: Turn):
    let tmpDir = getTempDir()
    during(turn, root, ?:Base64DecoderArguments) do (ds: Cap):

      let decTextPat = observePattern(!Base64Text, { @[%0]: grabLit() })
      during(turn, ds, decTextPat) do (txt: string):
        discard publish(turn, ds, Base64Text(
          txt: txt,
          bin: cast[seq[byte]](decode(txt)),
        ))

      let encTextPat = observePattern(!Base64Text, { @[%1]: grabLit() })
      during(turn, ds, encTextPat) do (bin: seq[byte]):
        discard publish(turn, ds, Base64Text(
          txt: encode(bin),
          bin: bin,
        ))

      let decFilePat = observePattern(!Base64File, { @[%0]: grabLit() })
      during(turn, ds, decFilePat) do (txt: string):
        var bin = decode(txt)
        var ctx = init[BLAKE2B_512]()
        ctx.update(bin)
        let
          digest = $ctx.final()
          path = tmpDir / digest
        writeFile(path, bin)
        discard publish(turn, ds, Base64File(
          txt: txt,
          path: path,
          size: bin.len,
        ))

when isMainModule:
  import syndicate/relays

  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      spawnBase64Decoder(turn, ds)
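To use the decoder, a client asserts an observation whose `txt` field is a literal and whose `bin` field is a capture; the actor answers with the completed `Base64Text` record. A sketch, assuming the client-side pattern DSL allows mixing a literal with a capture the way `LaterThan ?: { 0: ?then }` does later in this changeset:

```
import preserves, syndicate
import ../schema/base64  # assumed module path, as in the module above

proc decodeExample(turn: Turn; ds: Cap) =
  # Assert interest in <base64 "aGVsbG8=" ?bin>; the decoder reacts to the
  # literal txt field and publishes the record completed with the bytes.
  during(turn, ds, Base64Text ?: { 0: ?"aGVsbG8=", 1: grab() }) do (bin: seq[byte]):
    stderr.writeLine "decoded ", bin.len, " bytes"
```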
@ -3,8 +3,8 @@

import std/times
import preserves, syndicate,
- syndicate/[durings, relays],
- syndicate/actors/timers
+ syndicate/durings,
+ syndicate/drivers/timers

import ../schema/config

@ -18,9 +18,9 @@ type CacheEntity {.final.} = ref object of Entity
  pattern: Pattern
  lifetime: float64

-method publish(cache: CacheEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
+method publish(cache: CacheEntity; turn: Turn; ass: AssertionRef; h: Handle) =
  ## Re-assert pattern captures in a sub-facet.
- discard inFacet(turn) do (turn: var Turn):
+ discard inFacet(turn) do (turn: Turn):
    # TODO: a separate facet for every assertion, too much?
    var ass = depattern(cache.pattern, ass.value.sequence)
    # Build an assertion with what we have of the pattern and capture.

@ -30,12 +30,12 @@ method publish(cache: CacheEntity; turn: var Turn; ass: AssertionRef; h: Handle)
      stop(turn) # end this facet

proc isObserve(pat: Pattern): bool =
- pat.orKind == PatternKind.DCompound and
-   pat.dcompound.orKind == DCompoundKind.rec and
-   pat.dcompound.rec.label.isSymbol"Observe"
+ pat.orKind == PatternKind.group and
+   pat.group.type.orKind == GroupTypeKind.rec and
+   pat.group.type.rec.label.isSymbol"Observe"

-proc spawnCacheActor*(turn: var Turn; root: Cap): Actor =
- spawn("cache_actor", turn) do (turn: var Turn):
+proc spawnCacheActor*(turn: Turn; root: Cap): Actor =
+ spawnActor(turn, "cache_actor") do (turn: Turn):
    during(turn, root, ?:CacheArguments) do (ds: Cap, lifetime: float64):
      onPublish(turn, ds, ?:Observe) do (pat: Pattern, obs: Cap):
        var cache: CacheEntity

@ -51,7 +51,8 @@ proc spawnCacheActor*(turn: var Turn; root: Cap): Actor =
      discard observe(turn, ds, pat, cache)

when isMainModule:
- runActor("cache_actor") do (turn: var Turn; root: Cap):
-   spawnTimers(turn, root)
-   connectStdio(turn, root)
-   discard spawnCacheActor(turn, root)
+ import syndicate/relays

+ runActor("main") do (turn: Turn):
+   resolveEnvironment(turn) do (turn: Turn; ds: Cap):
+     discard spawnTimerDriver(turn, ds)
+     discard spawnCacheActor(turn, ds)
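Requesting caching from the actor above, sketched; the field layout follows `CacheArguments` in the schema diff earlier, and passing the dataspace capability directly as the embedded ref is an assumption:

```
import preserves, syndicate
import ../schema/config  # assumed module path

proc requestCaching(turn: Turn; ds: Cap) =
  # Ask the cache actor to retain matched assertions for an hour.
  discard publish(turn, ds, CacheArguments(
    field0: CacheArgumentsField0(dataspace: ds, lifetime: 3600.0)))
```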
@ -0,0 +1,28 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[dirs, os, paths]
import preserves, preserves/sugar
import syndicate

import ../schema/[assertions, config]

proc spawnFileSystemUsageActor*(turn: Turn; root: Cap): Actor {.discardable.} =
  spawn("file-system-usage", turn) do (turn: Turn):
    during(turn, root, ?:FileSystemUsageArguments) do (ds: Cap):
      let pat = observePattern(!FileSystemUsage, { @[%0]: grab() })
      during(turn, ds, pat) do (lit: Literal[string]):
        var ass = FileSystemUsage(path: lit.value)
        if fileExists(ass.path): ass.size = getFileSize(ass.path)
        else:
          for fp in walkDirRec(paths.Path(lit.value), yieldFilter = {pcFile}):
            var fs = getFileSize(string fp)
            inc(ass.size, fs)
        discard publish(turn, ds, ass)
        # TODO: updates?

when isMainModule:
  import syndicate/relays

  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      discard spawnFileSystemUsageActor(turn, ds)
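Client-side usage of the actor above, sketched: observe `<file-system-usage PATH ?size>` and the actor replies with the byte total. The `/var/log` path, the `BiggestInt` size type, and the mixed literal/capture pattern are assumptions:

```
import preserves, syndicate
import ../schema/assertions  # assumed module path, as in the module above

proc watchUsage(turn: Turn; ds: Cap) =
  # Assert interest in <file-system-usage "/var/log" ?size>.
  during(turn, ds, FileSystemUsage ?: { 0: ?"/var/log", 1: grab() }) do (size: BiggestInt):
    stderr.writeLine "/var/log holds ", size, " bytes"
```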
@ -0,0 +1,42 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

## Thin wrapper over `syndicate/drivers/http_driver`.

import pkg/preserves, pkg/syndicate
import pkg/syndicate/drivers/http_driver
import pkg/taps
import ../schema/config

proc spawnHttpDriver*(turn: Turn; ds: Cap): Actor {.discardable.} =
  http_driver.spawnHttpDriver(turn, ds)
  during(turn, ds, ?:HttpDriverArguments) do (ds: Cap):
    http_driver.spawnHttpDriver(turn, ds)

when isMainModule:
  import syndicate/relays

  when defined(solo5):
    import solo5
    acquireDevices([("eth0", netBasic)], netAcquireHook)

    proc envRoute: Route =
      var pr = parsePreserves $solo5_start_info.cmdline
      if result.fromPreserves pr:
        return
      elif pr.isSequence:
        for e in pr:
          if result.fromPreserves e:
            return
      quit("failed to parse command line for route to Syndicate gatekeeper")

    runActor("main") do (turn: Turn):
      let ds = newDataspace(turn)
      spawnRelays(turn, ds)
      resolve(turn, ds, envRoute(), spawnHttpDriver)

  else:
    runActor("main") do (turn: Turn):
      resolveEnvironment(turn) do (turn: Turn; ds: Cap):
        spawnHttpDriver(turn, ds)
@ -0,0 +1,2 @@
define:ipv6Enabled
include:"std/assertions"
@ -1,39 +1,77 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

-import std/[asyncdispatch, asyncnet, json]
-from std/nativesockets import AF_UNIX, SOCK_STREAM, Protocol
-import preserves, preserves/jsonhooks, syndicate, syndicate/relays
+import std/[json, options]
+import pkg/sys/[ioqueue, sockets]
+import preserves, preserves/jsonhooks, syndicate

-import ../schema/config, ../json_messages
+import ../schema/[config, json_messages]

-proc spawnJsonSocketTranslator*(turn: var Turn; root: Cap): Actor =
-  spawn("json-socket-translator", turn) do (turn: var Turn):
-    during(turn, root, ?:JsonSocketTranslatorArguments) do (ds: Cap, socketPath: string):
-      let socket = newAsyncSocket(
-        domain = AF_UNIX,
-        sockType = SOCK_STREAM,
-        protocol = cast[Protocol](0),
-        buffered = false,
-      )
-      addCallback(connectUnix(socket, socketPath), turn) do (turn: var Turn):
-        let a = JsonTranslatorConnected(path: socketPath)
-        discard publish(turn, ds, a)
-      let socketFacet = turn.facet
-      proc processOutput(fut: Future[string]) {.gcsafe.} =
-        run(socketFacet) do (turn: var Turn):
-          var data = fut.read.parseJson
-          message(turn, ds, RecvJson(data: data))
-          socket.recvLine.addCallback(processOutput)
-      socket.recvLine.addCallback(processOutput)
-      onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
-        asyncCheck(turn, send(socket, $data & "\n"))
-    do:
-      close(socket)
+template translateSocketBody {.dirty.} =
+  # Template workaround for CPS and parameterized types.
+  var
+    guard = initGuard(facet)
+    dec = newBufferedDecoder(0)
+    buf = new string #TODO: get a pointer into the decoder
+    alive = true
+  proc kill(turn: Turn) =
+    alive = false
+  proc setup(turn: Turn) =
+    # Closure, not CPS.
+    onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
+      if alive:
+        discard trampoline:
+          whelp write(socket[], $data & "\n")
+      else:
+        stderr.writeLine "dropped send of ", data
+    discard publish(turn, ds, initRecord("connected", sa.toPreserves))
+  onStop(facet, kill)
+  run(facet, setup)
+  while alive:
+    # TODO: parse buffer
+    buf[].setLen(0x4000)
+    let n = read(socket[], buf)
+    if n < 1:
+      stderr.writeLine "socket read returned ", n
+    else:
+      buf[].setLen(n)
+      dec.feed(buf[])
+      var data = dec.parse()
+      if data.isSome:
+        proc send(turn: Turn) =
+          # Closure, not CPS.
+          message(turn, ds, initRecord("recv", data.get))
+        run(facet, send)
+  stderr.writeLine "close socket ", sa
+  close(socket[])
+
+proc translateSocket(facet: Facet; ds: Cap; sa: TcpAddress) {.asyncio.} =
+  var
+    socket = new AsyncConn[Protocol.Tcp]
+    conn = connectTcpAsync(sa.host, Port sa.port)
+  socket[] = conn
+  translateSocketBody()
+
+proc translateSocket(facet: Facet; ds: Cap; sa: UnixAddress) {.asyncio.} =
+  var
+    socket = new AsyncConn[Protocol.Unix]
+    conn = connectUnixAsync(sa.path)
+  socket[] = conn
+  translateSocketBody()
+
+proc spawnJsonSocketTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
+  spawnActor(turn, "json-socket-translator") do (turn: Turn):
+    during(turn, root, ?:JsonSocketTranslatorArguments) do (ds: Cap, sa: TcpAddress):
+      linkActor(turn, "json-socket-translator") do (turn: Turn):
+        discard trampoline:
+          whelp translateSocket(turn.facet, ds, sa)
+    during(turn, root, ?:JsonSocketTranslatorArguments) do (ds: Cap, sa: UnixAddress):
+      linkActor(turn, "json-socket-translator") do (turn: Turn):
+        discard trampoline:
+          whelp translateSocket(turn.facet, ds, sa)

when isMainModule:
-  runActor("json_socket_translator") do (turn: var Turn; root: Cap):
-    connectStdio(turn, root)
-    discard spawnJsonSocketTranslator(turn, root)
+  import syndicate/relays
+
+  runActor("main") do (turn: Turn):
+    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
+      spawnJsonSocketTranslator(turn, ds)
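From the client's point of view the translator is symmetrical: `<send JSON>` messages go out over the socket and each incoming line comes back as a `<recv JSON>` message. A hypothetical mpv example:

```
import std/json
import preserves, syndicate
import ../schema/json_messages  # assumed module path, as in the module above

proc pauseMpv(turn: Turn; ds: Cap) =
  # One JSON line out through the socket…
  message(turn, ds, SendJson(data: %*{"command": ["set_property", "pause", true]}))
  # …and every line that comes back, logged.
  onMessage(turn, ds, ?:RecvJson) do (data: JsonNode):
    stderr.writeLine "mpv says: ", data
```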
@ -3,10 +3,9 @@

import std/[json, osproc]
import preserves
-import syndicate, syndicate/relays
+import syndicate

-import ../schema/config
-import ../json_messages
+import ../schema/[config, json_messages]

proc runChild(params: seq[string]): string =
  if params.len < 1:

@ -20,14 +19,15 @@ proc runChild(params: seq[string]): string =
  if result == "":
    stderr.writeLine "no output"

-proc spawnJsonStdioTranslator*(turn: var Turn; root: Cap): Actor {.discardable.} =
-  spawn("json-stdio-translator", turn) do (turn: var Turn):
+proc spawnJsonStdioTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
+  spawnActor(turn, "json-stdio-translator") do (turn: Turn):
    during(turn, root, ?:JsonTranslatorArguments) do (argv: seq[string], ds: Cap):
      var js = parseJson(runChild(argv))
-     message(turn, ds, RecvJson(data: js))
+     discard publish(turn, ds, RecvJson(data: js))

when isMainModule:
-  runActor("main") do (turn: var Turn; root: Cap):
-    connectStdio(turn, root)
-    spawnJsonStdioTranslator(turn, root)
+  import syndicate/relays
+
+  runActor("main") do (turn: Turn):
+    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
+      spawnJsonStdioTranslator(turn, ds)
@ -0,0 +1,105 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[options, tables, times]
import preserves, preserves/sugar
import syndicate, syndicate/drivers/timers

import ../schema/[assertions, config]

type PulseEntity {.final.} = ref object of Entity
  ## An entity that asserts and retracts observers on a pulse.
  self, timers: Cap
  target: Entity
  period: float
  timerHandle: Handle
  observers: Table[Handle, AssertionRef]
  observePattern: Pattern
  observing: bool

proc schedule(turn: Turn; pulse: PulseEntity) =
  ## Schedule the next pulse.
  ## The next pulse will be scheduled using the current time as the
  ## reference point and not the moment of the previous pulse.
  let then = getTime().toUnixFloat() + pulse.period
  pulse.timerHandle = publish(turn, pulse.timers, Observe(
    pattern: LaterThan ?: { 0: ?then },
    observer: pulse.self,
  ))

method publish(pulse: PulseEntity; turn: Turn; ass: AssertionRef; h: Handle) =
  ## Publish observers in response to <later-than …> assertions.
  pulse.timers.target.retract(turn, pulse.timerHandle)
  schedule(turn, pulse)
  pulse.observing = true
  for h, a in pulse.observers.pairs:
    pulse.target.publish(turn, a, h)
  pulse.target.sync(turn, pulse.self)

method message(pulse: PulseEntity; turn: Turn; v: AssertionRef) =
  ## Retract observers in response to a sync message.
  pulse.observing = false
  for h in pulse.observers.keys:
    pulse.target.retract(turn, h)

type ProxyEntity {.final.} = ref object of Entity
  ## A proxy `Entity` that diverts observers to a `PulseEntity`.
  pulse: PulseEntity

method publish(proxy: ProxyEntity; turn: Turn; ass: AssertionRef; h: Handle) =
  ## Proxy assertions that are not observations.
  if proxy.pulse.observePattern.matches ass.value:
    if proxy.pulse.observers.len == 0:
      schedule(turn, proxy.pulse)
    proxy.pulse.observers[h] = ass
  else:
    proxy.pulse.target.publish(turn, ass, h)

method retract(proxy: ProxyEntity; turn: Turn; h: Handle) =
  ## Retract proxied assertions.
  var obs: AssertionRef
  if proxy.pulse.observers.pop(h, obs):
    if proxy.pulse.observing:
      proxy.pulse.target.retract(turn, h)
    if proxy.pulse.observers.len == 0:
      proxy.pulse.timers.target.retract(turn, proxy.pulse.timerHandle)
  else:
    proxy.pulse.target.retract(turn, h)

method message(proxy: ProxyEntity; turn: Turn; v: AssertionRef) =
  ## Proxy messages.
  proxy.pulse.target.message(turn, v)

method sync(proxy: ProxyEntity; turn: Turn; peer: Cap) =
  ## Proxy sync.
  proxy.pulse.target.sync(turn, peer)

proc newProxyEntity(turn: Turn; timers, ds: Cap; period: float): ProxyEntity =
  new result
  result.pulse = PulseEntity(
    target: ds.target,
    timers: timers,
    observePattern: ?:Observe,
    period: period,
  )
  result.pulse.self = newCap(turn, result.pulse)

proc spawnPulseActor*(turn: Turn; root: Cap): Actor =
  ## Spawn an actor that retracts and re-asserts observers on
  ## a timed pulse. Requires a timer service on the `root` capability.
  spawnActor(turn, "pulse") do (turn: Turn):
    let grabPeriod = observePattern(!Pulse, { @[%0]: grab() })
    during(turn, root, ?:PulseArguments) do (ds: Cap):
      during(turn, ds, grabPeriod) do (lit: Literal[float]):
        if lit.value < 0.000_1:
          stderr.writeLine("pulse period is too small: ", lit.value, "s")
        else:
          let proxyCap = newCap(turn, newProxyEntity(turn, root, ds, lit.value))
          var pulse = Pulse(periodSec: lit.value, proxy: embed proxyCap)
          discard publish(turn, ds, pulse)

when isMainModule:
  import syndicate/relays

  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      discard spawnPulseActor(turn, ds)
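Requesting a pulse proxy, sketched: assert interest in `<pulse PERIOD ?proxy>` with a literal period and the actor above publishes the proxy capability to route assertions through. The mixed literal/capture pattern is an assumption, as in the earlier sketches:

```
import preserves, syndicate
import ../schema/assertions  # assumed module path, as in the module above

proc requestPulse(turn: Turn; ds: Cap) =
  # Ask for a ten-second pulse and react to the proxy capability.
  during(turn, ds, Pulse ?: { 0: ?10.0, 1: grab() }) do (proxy: Value):
    stderr.writeLine "pulse proxy available: ", proxy
```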
@ -1,114 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

const withSqlite* {.booldefine.}: bool = true

when withSqlite:
  import preserves, syndicate, syndicate/relays
  import ../schema/[config, sql]

  # Avoid Sqlite3 from the standard library because it is
  # only held together by wishful thinking and dlload.

  {.passC: staticExec("pkg-config --cflags sqlite3").}
  {.passL: staticExec("pkg-config --libs sqlite3").}

  {.pragma: sqlite3h, header: "sqlite3.h".}

  var
    SQLITE_VERSION_NUMBER {.importc, sqlite3h.}: cint
    SQLITE_OK {.importc, sqlite3h.}: cint
    SQLITE_ROW {.importc, sqlite3h.}: cint
    SQLITE_DONE {.importc, sqlite3h.}: cint
    SQLITE_OPEN_READONLY {.importc, sqlite3h.}: cint

  const
    SQLITE_INTEGER = 1
    SQLITE_FLOAT = 2
    SQLITE_TEXT = 3
    SQLITE_BLOB = 4
    # SQLITE_NULL = 5

  type
    Sqlite3 {.importc: "sqlite3", sqlite3h.} = distinct pointer
    Stmt {.importc: "sqlite3_stmt", sqlite3h.} = distinct pointer

  {.pragma: importSqlite3, importc: "sqlite3_$1", sqlite3h.}

  proc libversion_number: cint {.importSqlite3.}

  proc open_v2(filename: cstring; ppDb: ptr Sqlite3; flags: cint; zVfs: cstring): cint {.importSqlite3.}
  proc close(ds: Sqlite3): int32 {.discardable, importSqlite3.}

  proc errmsg(db: Sqlite3): cstring {.importSqlite3.}

  proc prepare_v2(db: Sqlite3; zSql: cstring, nByte: cint; ppStmt: ptr Stmt; pzTail: ptr cstring): cint {.importSqlite3.}

  proc step(para1: Stmt): cint {.importSqlite3.}

  proc column_count(stmt: Stmt): int32 {.importSqlite3.}
  proc column_blob(stmt: Stmt; col: cint): pointer {.importSqlite3.}
  proc column_bytes(stmt: Stmt; col: cint): cint {.importSqlite3.}
  proc column_double(stmt: Stmt; col: cint): float64 {.importSqlite3.}
  proc column_int64(stmt: Stmt; col: cint): int64 {.importSqlite3.}
  proc column_text(stmt: Stmt; col: cint): cstring {.importSqlite3.}
  proc column_type(stmt: Stmt; col: cint): cint {.importSqlite3.}
  proc finalize(stmt: Stmt): cint {.importSqlite3.}

  doAssert libversion_number() == SQLITE_VERSION_NUMBER

  proc logError(db: Sqlite3; context: string) =
    writeLine(stderr, errmsg(db), ": ", context)

  proc extractValue(stmt: Stmt; col: cint): Value =
    case column_type(stmt, col)
    of SQLITE_INTEGER:
      result = toPreserve(column_int64(stmt, col))
    of SQLITE_FLOAT:
      result = toPreserve(column_double(stmt, col))
    of SQLITE_TEXT:
      result = Value(kind: pkString, string: newString(column_bytes(stmt, col)))
      if result.string.len > 0:
        copyMem(addr result.string[0], column_text(stmt, col), result.string.len)
    of SQLITE_BLOB:
      result = Value(kind: pkByteString, bytes: newSeq[byte](column_bytes(stmt, col)))
      if result.bytes.len > 0:
        copyMem(addr result.bytes[0], column_blob(stmt, col), result.bytes.len)
    else:
      result = initRecord("null")

  proc extractRecord(stmt: Stmt; label: Value, arity: cint): Value =
    result = initRecord(label, arity)
    for col in 0..<arity: result.record[col] = extractValue(stmt, col)

  proc spawnSqliteActor*(turn: var Turn; root: Cap): Actor {.discardable.} =
    spawn("sqlite-actor", turn) do (turn: var Turn):
      during(turn, root, ?:SqliteArguments) do (path: string, ds: Cap):
        var db: Sqlite3
        if open_v2(path, addr db, SQLITE_OPEN_READONLY, nil) != SQLITE_OK:
          logError(db, path)
        else:
          during(turn, ds, ?:Query) do (label: Value, statement: string):
            var stmt: Stmt
            if prepare_v2(db, statement, statement.len.cint, addr stmt, nil) != SQLITE_OK:
              logError(db, statement)
            else:
              try:
                let arity = column_count(stmt)
                var res = step(stmt)
                while res == SQLITE_ROW:
                  var rec = extractRecord(stmt, label, arity)
                  discard publish(turn, ds, rec)
                  res = step(stmt)
                assert res != 100
                if res != SQLITE_DONE:
                  logError(db, statement)
              finally:
                if finalize(stmt) != SQLITE_OK: logError(db, statement)
      do:
        close(db)

  when isMainModule:
    runActor("main") do (turn: var Turn; root: Cap):
      connectStdio(turn, root)
      spawnSqliteActor(turn, root)
@ -1,105 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

## An actor for relaying Webhooks.

import std/[asyncdispatch, asynchttpserver, net, strutils, tables, uri]

import preserves, preserves/jsonhooks
import syndicate, syndicate/[bags, relays]
import syndicate/protocols/http

import ../schema/config

type
  CapBag = Bag[Cap]
  Endpoints = Table[seq[string], Cap]

func splitPath(s: string): seq[string] = s.strip(chars = {'/'}).split('/')

proc toRecord(req: Request; seqnum: BiggestInt; path: seq[string]): Value =
  ## Convert a request value from the std/asynchttpserver module
  ## to a request type from syndicate/protocols/http.
  var record: HttpRequest
  record.sequenceNumber = seqnum
  record.host = req.hostname
  record.`method` = Symbol($req.reqMethod)
  record.path = path
  for key, val in req.headers.pairs:
    record.headers[Symbol key] = val
  for key, val in decodeQuery(req.url.query):
    record.query[Symbol key] =
      @[QueryValue(orKind: QueryValueKind.string, string: val)]
  let contentType = req.headers.getOrDefault("content-type")
  result = toPreserves record
  if req.body.len > 0:
    result[7] =
      case contentType.toString
      of "application/json":
        req.body.parsePreserves
      of "application/octet-stream":
        cast[seq[byte]](req.body).toPreserves
      else:
        req.body.toPreserves

proc spawnWebhookActor*(turn: var Turn; root: Cap): Actor =
  spawn("webhooks", turn) do (turn: var Turn):
    let pat = grabRecord("webhooks", grabDictionary({ "listen": ?:config.Tcp }))
    # Grab the details on listening for requests.
    # Disregard endpoints so the server doesn't restart as those change.
    during(turn, root, pat) do (host: string; port: Port):
      let endpointsPat = grabRecord("webhooks", grabDictionary({
        "listen": ?config.Tcp(host: host, port: BiggestInt port),
        "endpoints": grab(),
      }))
      # construct a pattern for grabbing endpoints when the server is ready
      var seqNum: BiggestInt
      let facet = turn.facet
      let endpoints = newTable[seq[string], CapBag]()
      # use a bag so the same capability registered multiple
      # times with the same path does not get duplicate messages

      proc cb(req: Request): Future[void] =
        inc(seqNum)
        let path = req.url.path.splitPath
        if not endpoints.hasKey path:
          result = respond(req, Http404,
            "no capabilities registered at $1\n" % [req.url.path])
        else:
          result = respond(req, Http200, "")
          proc act(turn: var Turn) {.gcsafe.} =
            let rec = req.toRecord(seqNum, path)
            for cap in endpoints[path]:
              message(turn, cap, rec)
          run(facet, act)

      let server = newAsyncHttpServer()
      stderr.writeLine("listening for webhooks at ", host, ":", port)
      if host.isIpAddress:
        var ip = parseIpAddress host
        case ip.family
        of IPv6:
          asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
        of IPv4:
          asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))
      else:
        asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
        asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))

      during(turn, root, endpointsPat) do (eps: Endpoints):
        for path, cap in eps:
          if not endpoints.hasKey path:
            endpoints[path] = CapBag()
          discard endpoints[path].change(cap, +1)
      do:
        for path, cap in eps:
          discard endpoints[path].change(cap, -1)

      do:
        stderr.writeLine("closing for webhook server at ", host, ":", port)
        close(server)

when isMainModule:
  runActor("webhooks") do (turn: var Turn; root: Cap):
    connectStdio(turn, root)
    discard spawnWebhookActor(turn, root)
@ -1,55 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[asyncdispatch, json]
import preserves
import syndicate, syndicate/relays
import ws

import ../schema/config, ../json_messages

type WebSocket = ws.WebSocket
  # not the object from the transportAddress schema

proc spawnWebsocketActor*(turn: var Turn; root: Cap): Actor =
  spawn("websocket-actor", turn) do (turn: var Turn):
    during(turn, root, ?:WebsocketArguments) do (ds: Cap, url: string):
      let facet = turn.facet
      var
        ws: WebSocket
        connectedHandle: Handle
      newWebSocket(url).addCallback(turn) do (turn: var Turn; sock: WebSocket):
        ws = sock
        connectedHandle = publish(turn, ds, initRecord("connected", url.toPreserves))
        var fut: Future[(Opcode, string)]
        proc recvMessage() {.gcsafe.} =
          fut = receivePacket ws
          addCallback(fut, facet) do (turn: var Turn):
            let (opcode, data) = read fut
            case opcode
            of Text:
              message(turn, ds,
                RecvJson(data: data.parseJson))
            of Binary:
              message(turn, ds,
                initRecord("recv", cast[seq[byte]](data).toPreserves))
            of Ping:
              asyncCheck(turn, ws.send(data, Pong))
            of Pong, Cont:
              discard
            of Close:
              retract(turn, connectedHandle)
              stderr.writeLine "closed connection with ", url
              stop(turn)
              return
            recvMessage()
        recvMessage()
        onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
          asyncCheck(turn, ws.send($data, Text))
    do:
      close(ws)

when isMainModule:
  runActor("main") do (turn: var Turn; root: Cap):
    connectStdio(turn, root)
    discard spawnWebsocketActor(turn, root)
@ -0,0 +1,34 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[options, parsexml, xmlparser, xmltree]
import preserves, preserves/sugar, preserves/xmlhooks
import syndicate

import ../schema/[assertions, config]

proc translateXml(s: string): XmlTranslation =
  result.xml = s
  try: result.pr = result.xml.parseXml({allowUnquotedAttribs}).toPreservesHook
  except XmlError: discard

proc translatePreserves(pr: Value): XmlTranslation {.gcsafe.} =
  result.pr = pr
  var xn = result.pr.preservesTo(XmlNode)
  if xn.isSome: result.xml = $get(xn)

proc spawnXmlTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
  spawnActor(turn, "xml-translator") do (turn: Turn):
    during(turn, root, ?:XmlTranslatorArguments) do (ds: Cap):
      let xmlPat = observePattern(!XmlTranslation, { @[%0]: grab() })
      during(turn, ds, xmlPat) do (xs: Literal[string]):
        publish(turn, ds, translateXml(xs.value))
      let prPat = observePattern(!XmlTranslation, { @[%1]: grab() })
      during(turn, ds, prPat) do (pr: Literal[Value]):
        publish(turn, ds, translatePreserves(pr.value))

when isMainModule:
  import syndicate/relays

  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      spawnXmlTranslator(turn, ds)
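Using the translator above, sketched: assert interest in `<xml-translation XML ?pr>` and it answers with the parsed Preserves value (or assert the `pr` side to go the other way). Again the literal/capture mix is assumed:

```
import preserves, syndicate
import ../schema/assertions  # assumed module path, as in the module above

proc parseFragment(turn: Turn; ds: Cap) =
  # Assert interest in <xml-translation "<foo/>" ?pr>.
  during(turn, ds, XmlTranslation ?: { 0: ?"<foo/>", 1: grab() }) do (pr: Value):
    stderr.writeLine "as Preserves: ", pr
```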
@ -1,132 +0,0 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

## This was all Tony's idea, except for the silly name.

import std/[asyncdispatch, os, terminal]
import preserves
import syndicate, syndicate/[durings, relays]
import illwill

proc exitProc() {.noconv.} =
  illwillDeinit()
  showCursor()
  quit QuitSuccess

setControlCHook(exitProc)

proc parsePattern(pr: Value): Pattern =
  let
    dropSigil = initRecord("lit", "_".toSymbol)
    grabSigil = initRecord("lit", "?".toSymbol)
  var pr = grab(pr).toPreserves
  apply(pr) do (pr: var Value):
    if pr == dropSigil:
      pr = initRecord("_")
    elif pr == grabSigil:
      pr = initRecord("bind", initRecord("_"))
  doAssert result.fromPreserves(pr)

proc inputPattern: Pattern =
  var args = commandLineParams()
  if args.len != 1:
    quit "expected a single pattern argument"
  else:
    var input = pop args
    if input == "":
      quit "expected Preserves Pattern on stdin"
    else:
      var pr: Value
      try: pr = decodePreserves(input)
      except ValueError: discard
      try: pr = parsePreserves(input)
      except ValueError: discard
      if pr.isFalse:
        quit "failed to parse Preserves argument"
      result = parsePattern(pr)

type TermEntity {.final.} = ref object of Entity
  pattern: Pattern
  value: Value

method publish(te: TermEntity; turn: var Turn; v: AssertionRef; h: Handle) =
  te.value = v.value
  var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
  var y = 1
  termBuf.write(1, y, $te.pattern, styleBright)
  inc(y)
  termBuf.drawHorizLine(0, termBuf.width(), y)
  inc(y)
  termBuf.write(0, y, $h, styleBright)
  for i, e in te.value.sequence:
    inc(y)
    termBuf.write(1, y, $e)
  termBuf.display()

method retract(te: TermEntity; turn: var Turn; h: Handle) =
  var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
  var y = 1
  termBuf.write(1, y, $te.pattern, styleDim)
  inc y
  termBuf.drawHorizLine(0, termBuf.width(), y, true)
  inc(y)
  termBuf.write(0, y, $h, styleBright)
  if te.value.isSequence:
    for i, e in te.value.sequence:
      inc(y)
      termBuf.write(1, y, $e)
  else:
    inc(y)
    termBuf.write(1, y, $te.value)
  termBuf.display()

type DumpEntity {.final.} = ref object of Entity
  discard

method publish(dump: DumpEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
  stdout.writeLine($ass.value)
  stdout.flushFile()

method message*(dump: DumpEntity; turn: var Turn; ass: AssertionRef) =
  stdout.writeLine($ass.value)
  stdout.flushFile()

proc exit {.noconv.} =
  illwillDeinit()
  showCursor()
  quit()

setControlCHook(exit)

proc main =
  let
    route = envRoute()
    pat = inputPattern()

  if stdout.is_a_TTY:
    illwillInit()
    hideCursor()

    discard bootDataspace("syndex_card") do (turn: var Turn; root: Cap):
      resolve(turn, root, route) do (turn: var Turn; ds: Cap):
        var termBuf = newTerminalBuffer(terminalWidth(), terminalHeight())
        termBuf.write(1, 1, $pat, styleBright)
        termBuf.drawHorizLine(1, termBuf.width(), 2)
        termBuf.display()

        discard observe(turn, ds, pat, TermEntity(pattern: pat))

    while true:
      try: poll()
      except CatchableError:
        illwillDeinit()
        showCursor()
        quit getCurrentExceptionMsg()

  else:
    let entity = DumpEntity()
    runActor("syndex_card") do (root: Cap; turn: var Turn):
      resolve(turn, root, route) do (turn: var Turn; ds: Cap):
        discard observe(turn, ds, pat, entity)

main()
@ -36,19 +36,19 @@ proc toLine(values: seq[Value]; prefix: char): string =
    add(result, $v)
  add(result, '\n')

-method publish(dump: DumpEntity; turn: var Turn; ass: AssertionRef; h: Handle) =
+method publish(dump: DumpEntity; turn: Turn; ass: AssertionRef; h: Handle) =
  var values = ass.value.sequence
  stdout.write(values.toLine('+'))
  stdout.flushFile()
  dump.assertions[h] = values

-method retract(dump: DumpEntity; turn: var Turn; h: Handle) =
+method retract(dump: DumpEntity; turn: Turn; h: Handle) =
  var values: seq[Value]
  if dump.assertions.pop(h, values):
    stdout.write(values.toLine('-'))
    stdout.flushFile()

-method message*(dump: DumpEntity; turn: var Turn; ass: AssertionRef) =
+method message*(dump: DumpEntity; turn: Turn; ass: AssertionRef) =
  stdout.write(ass.value.sequence.toLine('!'))
  stdout.flushFile()

@ -58,13 +58,10 @@ proc exitProc() {.noconv.} =

proc main =
  let
-   route = envRoute()
    patterns = inputPatterns()
    entity = DumpEntity()
- runActor("syndex_card") do (root: Cap; turn: var Turn):
-   for pat in patterns:
-     discard observe(turn, root, pat, entity)
-   resolve(turn, root, route) do (turn: var Turn; ds: Cap):
+ runActor("syndex_card") do (turn: Turn):
+   resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      for pat in patterns:
        discard observe(turn, ds, pat, entity)
@ -0,0 +1,211 @@
# SPDX-FileCopyrightText: ☭ Emery Hemingway
# SPDX-License-Identifier: Unlicense

import std/[os, strutils]
import preserves, preserves/sugar, syndicate
import ./schema/[assertions, config]

{.passC: staticExec("pkg-config --cflags libxslt").}
{.passL: staticExec("pkg-config --libs libxslt").}

{.pragma: libxslt, header: "libxslt/xslt.h", importc.}

type
  xmlElementType {.libxslt.} = enum
    XML_ELEMENT_NODE = 1,
    XML_ATTRIBUTE_NODE = 2,
    XML_TEXT_NODE = 3,
    XML_CDATA_SECTION_NODE = 4,
    XML_ENTITY_REF_NODE = 5,
    XML_ENTITY_NODE = 6,
    XML_PI_NODE = 7,
    XML_COMMENT_NODE = 8,
    XML_DOCUMENT_NODE = 9,
    XML_DOCUMENT_TYPE_NODE = 10,
    XML_DOCUMENT_FRAG_NODE = 11,
    XML_NOTATION_NODE = 12,
    XML_HTML_DOCUMENT_NODE = 13,
    XML_DTD_NODE = 14,
    XML_ELEMENT_DECL = 15,
    XML_ATTRIBUTE_DECL = 16,
    XML_ENTITY_DECL = 17,
    XML_NAMESPACE_DECL = 18,
    XML_XINCLUDE_START = 19,
    XML_XINCLUDE_END = 20

  xmlNsPtr = ptr xmlNs
  xmlNs {.libxslt.} = object
    next: xmlNsPtr
    href, prefix: cstring

  xmlAttrPtr = ptr xmlAttr
  xmlAttr {.libxslt.} = object
    name: cstring
    next: xmlAttrPtr
    children: xmlNodePtr

  xmlElementContentPtr = ptr xmlElementContent
  xmlElementContent {.libxslt.} = object
    encoding: cstring

  xmlNodePtr = ptr xmlNode
  xmlNode {.libxslt.} = object
    `type`: xmlElementType
    name: cstring
    children, next: xmlNodePtr
    content: cstring
    properties: xmlAttrPtr
    nsDef: xmlNsPtr

  xmlDocPtr {.libxslt.} = distinct pointer
  xsltStylesheetPtr {.libxslt.} = distinct pointer

proc isNil(x: xmlDocPtr): bool {.borrow.}
proc isNil(x: xsltStylesheetPtr): bool {.borrow.}

proc xmlReadMemory(buf: pointer; len: cint; url, enc: cstring; opts: cint): xmlDocPtr {.libxslt.}

proc xmlReadMemory(buf: string; uri = "noname.xml"): xmlDocPtr =
  xmlReadMemory(buf[0].addr, buf.len.cint, uri, "UTF-8", 0)

proc xmlParseFile(filename: cstring): xmlDocPtr {.libxslt.}

proc xmlFreeDoc(p: xmlDocPtr) {.libxslt.}

proc xmlDocGetRootElement(doc: xmlDocPtr): xmlNodePtr {.libxslt.}

proc loadXmlDoc(text: string): xmlDocPtr =
  if text.startsWith("/") and fileExists(text):
    xmlParseFile(text)
  else:
    xmlReadMemory(text, "noname.xml")

proc xsltParseStylesheetFile(filename: cstring): xsltStylesheetPtr {.libxslt.}

proc xsltParseStylesheetDoc(doc: xmlDocPtr): xsltStylesheetPtr {.libxslt.}

proc xsltParseStylesheetDoc(text: string; uri = "noname.xml"): xsltStylesheetPtr =
  var doc = xmlReadMemory(text, uri)
  result = xsltParseStylesheetDoc(doc)
  # implicit free of doc

proc loadStylesheet(text: string): xsltStylesheetPtr =
  if text.startsWith("/") and fileExists(text):
    xsltParseStylesheetFile(text)
  else:
    xsltParseStylesheetDoc(text, "noname.xsl")

proc xsltApplyStylesheet(
  style: xsltStylesheetPtr, doc: xmlDocPtr, params: cstringArray): xmlDocPtr {.libxslt.}

proc xsltFreeStylesheet(style: xsltStylesheetPtr) {.libxslt.}

proc xsltSaveResultToString(txt: ptr pointer; len: ptr cint; res: xmlDocPtr; style: xsltStylesheetPtr): cint {.libxslt.}

proc c_free*(p: pointer) {.importc: "free", header: "<stdlib.h>".}

proc xsltSaveResultToString(res: xmlDocPtr; style: xsltStylesheetPtr): string =
  var
    txt: pointer
    len: cint
  if xsltSaveResultToString(addr txt, addr len, res, style) < 0:
    raise newException(CatchableError, "xsltSaveResultToString failed")
  if len > 0:
    result = newString(int len)
    copyMem(result[0].addr, txt, len)
    c_free(txt)

proc initLibXml =
  discard

proc XML_GET_CONTENT(xn: xmlNodePtr): xmlElementContentPtr {.libxslt.}

proc textContent(xn: xmlNodePtr): string =
  if xn.content != nil: result = $xn.content

proc content(attr: xmlAttrPtr): string =
  var child = attr.children
  while not child.isNil:
    result.add child.content
    child = child.next

proc preserveSiblings(result: var seq[Value]; first: xmlNodePtr) =
  var xn = first
  while not xn.isNil:
    case xn.type
    of XML_ELEMENT_NODE:
      var child = Value(kind: pkRecord)
      if not xn.nsDef.isNil:
        child.record.add initDictionary()
        var ns = xn.nsDef
        while not ns.isNil:
          if not ns.href.isNil:
            var key = Value(kind: pkString)
            if ns.prefix.isNil:
              key.string = "xmlns"
            else:
              key.string = "xmlns:" & $ns.prefix
            child.record[0][key] = toPreserves($ns.href)
          ns = ns.next

      if not xn.properties.isNil:
        if child.record.len < 1:
          child.record.add initDictionary()
        var attr = xn.properties
        while not attr.isNil:
          var
            key = toPreserves($attr.name)
            val = toPreserves(attr.content)
          child.record[0][key] = val
          attr = attr.next
      if not xn.children.isNil:
        preserveSiblings(child.record, xn.children)
      child.record.add toSymbol($xn.name)
      result.add child
    of XML_TEXT_NODE:
      result.add textContent(xn).toPreserves
    else:
      stderr.writeLine "not an XML_ELEMENT_NODE - ", $xn.type
    xn = xn.next

proc toPreservesHook*(xn: xmlNodePtr): Value =
  var items = newSeqOfCap[Value](1)
  preserveSiblings(items, xn)
  items[0]

proc spawnXsltActor*(turn: Turn; root: Cap): Actor {.discardable.} =
  spawnActor(turn, "xslt") do (turn: Turn):
    initLibXml()
    during(turn, root, ?:XsltArguments) do (ds: Cap):
      let sheetsPat = observePattern(!XsltTransform, { @[%0]: grab(), @[%1]: grab() })
      during(turn, ds, sheetsPat) do (stylesheet: Literal[string], input: Literal[string]):
        let cur = loadStylesheet(stylesheet.value)
        if cur.isNil:
          stderr.writeLine "failed to parse stylesheet"
        else:
          let doc = loadXmlDoc(input.value)
          if doc.isNil:
            stderr.writeLine "failed to parse input document"
          else:
            let
              params = allocCStringArray([])
              res = xsltApplyStylesheet(cur, doc, params)
            if res.isNil:
              stderr.writeLine "failed to apply stylesheet transformation"
            else:
              let output = xsltSaveResultToString(res, cur)
              deallocCStringArray(params)
              publish(turn, ds, XsltTransform(
                stylesheet: stylesheet.value,
                input: input.value,
                output: xmlDocGetRootElement(res).toPreservesHook,
              ))
              xmlFreeDoc(res)
            xmlFreeDoc(doc)
          xsltFreeStylesheet(cur)

when isMainModule:
  import syndicate/relays

  runActor("main") do (turn: Turn):
    resolveEnvironment(turn) do (turn: Turn; ds: Cap):
      spawnXsltActor(turn, ds)
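And the same observation idiom drives the XSLT actor, sketched below with hypothetical stylesheet and input strings; the literal/capture mix in one pattern is assumed, as in the earlier sketches:

```
import preserves, syndicate
import ./schema/assertions  # assumed module path, as in the module above

proc transformExample(turn: Turn; ds: Cap; stylesheet, input: string) =
  # Assert <xslt-transform STYLESHEET INPUT ?output>; both strings may be
  # inline documents or absolute file paths, per loadStylesheet/loadXmlDoc above.
  during(turn, ds, XsltTransform ?: { 0: ?stylesheet, 1: ?input, 2: grab() }) do (output: Value):
    stderr.writeLine "transformed: ", output
```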
@ -1,13 +1,13 @@
# Package

-version = "20240109"
+version = "20240509"
author = "Emery Hemingway"
description = "Utilities for Syndicated Actors and Synit"
license = "unlicense"
srcDir = "src"
-bin = @["mintsturdyref", "mount_actor", "msg", "net_mapper", "preserve_process_environment", "syndex_card", "syndump"]
+bin = @["http_client", "mintsturdyref", "mount_actor", "msg", "postgre_actor", "preserve_process_environment", "rofi_script_actor", "sqlite_actor", "syndesizer", "syndump", "xslt_actor"]


# Dependencies

-requires "nim >= 2.0.0", "illwill", "syndicate >= 20240108", "ws"
+requires "https://git.syndicate-lang.org/ehmry/syndicate-nim.git >= 20240507", "https://github.com/ehmry/nim-sys.git#4ef3b624db86e331ba334e705c1aa235d55b05e1", "https://git.sr.ht/~ehmry/nim_taps >= 20240405"