Compare commits
No commits in common. "xdg_open_ng-0.4.0" and "trunk" have entirely different histories.
xdg_open_n
...
trunk
|
@ -1,2 +1,2 @@
|
|||
uri_runner
|
||||
xdg_open
|
||||
/nim.cfg
|
||||
*.check
|
||||
|
|
488
README.md
488
README.md
|
@ -1,10 +1,486 @@
|
|||
# xdg-open-ng
|
||||
# Syndicate utils
|
||||
|
||||
An `xdg-open` replacement that uses Syndicate and PCRE pattern matching to open URIs.
|
||||
## Syndesizer
|
||||
|
||||
There are two utilities, `xdg-open` and `uri_runner`. The former connects to a shared Syndicate dataspace via a UNIX socket at `$SYNDICATE_SOCK`, otherwise `$XDG_RUNTIME_DIR/dataspace`, and has no other configuration. The `uri_runner` component is intended to be managed by the [Syndicate server](https://git.syndicate-lang.org/syndicate-lang/syndicate-rs) through which it receives configuration; see [uri_runner.pr](./uri_runner.pr) as an example.
|
||||
A Syndicate multitool that includes a number of different actors that become active via configuration.
|
||||
|
||||
The [protocol.nim](./src/protocol.nim) file is generated from the [protocol.prs](./protocol.prs) schema, a [Tupfile](https://gittup.org/tup/) file is provided to do this.
|
||||
Whether you use a single instance for many protocols or many specialized instances is up to you.
|
||||
|
||||
## TODO
|
||||
- Fallback commands?
|
||||
### Cache
|
||||
|
||||
Observes patterns and reasserts the values captured for a given lifetime. Takes the argument `<cache { dataspace: #!any lifetime: float }>`. The lifetime of a cache counts down from the moment a value is asserted.
|
||||
|
||||
Example configuration:
|
||||
```
|
||||
? <nixspace ?nixspace> [
|
||||
; Require the nix_actor during observations.
|
||||
?nixspace> ? <Observe <rec eval _> _> [
|
||||
$config <require-service <daemon nix_actor>> ]
|
||||
?nixspace> ? <Observe <rec realise _> _> [
|
||||
$config <require-service <daemon nix_actor>> ]
|
||||
|
||||
; Cache anything captured by observers in the $nixspace for an hour.
|
||||
; The nix_actor is not required during caching.
|
||||
$config <require-service <daemon syndesizer>>
|
||||
$config ? <service-object <daemon syndesizer> ?cap> [
|
||||
$cap <cache { dataspace: $nixspace lifetime: 3600.0 }> ]
|
||||
]
|
||||
```
|
||||
|
||||
### File System Usage
|
||||
|
||||
Summarize the size of a file-system directory. Equivalent to `du -s -b`.
|
||||
Query the size of a directory in bytes by observing `<file-system-usage "/SOME/PATH" ?size>`.
|
||||
|
||||
```
|
||||
# Configuration example
|
||||
|
||||
? <exposed-dataspace ?ds> [
|
||||
|
||||
<require-service <daemon syndesizer>>
|
||||
? <service-object <daemon syndesizer> ?cap> [
|
||||
$cap <file-system-usage { dataspace: $ds }>
|
||||
]
|
||||
|
||||
]
|
||||
```
|
||||
|
||||
### HTTP driver
|
||||
|
||||
Experimental HTTP server that services requests using [some version](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/commit/9864ce0ec86fb2f916c2aab318a1e6994ab8834c/schemas/http.prs) of the http Syndicate protocol schema.
|
||||
|
||||
```
|
||||
# Configuration example
|
||||
|
||||
let ?not-found = dataspace
|
||||
$not-found ? <request _ ?res> [
|
||||
$res ! <status 503 "Service unavailable">
|
||||
$res ! <done "No binding here.">
|
||||
]
|
||||
|
||||
let ?greeting = dataspace
|
||||
$greeting ? <request _ ?res> [
|
||||
$res ! <status 200 "ok">
|
||||
$res ! <chunk "Hello world">
|
||||
$res ! <done "!">
|
||||
]
|
||||
|
||||
let ?http = dataspace
|
||||
$http [
|
||||
<http-bind #f 80 get [ ] $not-found>
|
||||
<http-bind #f 80 get [|...|] $not-found>
|
||||
<http-bind #f 80 get ["hello"] $greeting>
|
||||
]
|
||||
|
||||
? <service-object <daemon http-driver> ?cap> [
|
||||
$cap <http-driver { dataspace: $http }>
|
||||
]
|
||||
|
||||
<daemon http-driver {
|
||||
argv: [ "/bin/syndesizer" ]
|
||||
clearEnv: #t
|
||||
protocol: application/syndicate
|
||||
}>
|
||||
|
||||
<require-service <daemon http-driver>>
|
||||
```
|
||||
|
||||
### JSON Socket Translator
|
||||
|
||||
Communicate with sockets that send and receive lines of JSON using `<send …>` and `<recv …>` messages.
|
||||
Responds to the gatekeeper step `<json-socket-translator { socket: <unix "…"> / <tcp "…" …> }> $resolver>`.
|
||||
|
||||
```
|
||||
# MPV configuration example
|
||||
<require-service <daemon mpv-server>>
|
||||
|
||||
<daemon mpv-server {
|
||||
argv: [
|
||||
"/run/current-system/sw/bin/mpv"
|
||||
"--really-quiet"
|
||||
"--idle=yes"
|
||||
"--no-audio-display"
|
||||
"--input-ipc-server=/run/user/1000/mpv.sock"
|
||||
"--volume=75"
|
||||
]
|
||||
protocol: none
|
||||
}>
|
||||
|
||||
let ?resolver = dataspace
|
||||
$resolver ? <accepted ?mpvSpace> $mpvSpace [
|
||||
# announce the dataspace when the translator is connected
|
||||
$config <mpv $mpvSpace>
|
||||
$config <bind <ref { oid: "mpv" key: #x"" }> $mpvSpace #f>
|
||||
|
||||
# translate <play-file …> to an MPV command
|
||||
?? <play-file ?file> [
|
||||
! <send { "command": ["loadfile" $file "append-play"] }>
|
||||
]
|
||||
|
||||
# clear the playlist on idle so it doesn't grow indefinitely
|
||||
?? <recv {"event": "idle"}> [
|
||||
! <send { "command": ["playlist-clear"] }>
|
||||
]
|
||||
]
|
||||
|
||||
? <service-state <daemon mpv-server> ready> [
|
||||
<require-service <daemon syndesizer>>
|
||||
? <service-object <daemon syndesizer> ?cap> [
|
||||
$cap <resolve <json-socket-translator {
|
||||
socket: <unix "/run/user/1000/mpv.sock">
|
||||
}> $resolver>
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
### JSON Stdio Translator
|
||||
|
||||
Executes a command, parses its JSON output, converts it to the record `<recv @jsonData any>`, and publishes and messages it to a dataspace.
|
||||
|
||||
```
|
||||
# Configuration example
|
||||
<require-service <daemon syndesizer>>
|
||||
|
||||
let ?ds = dataspace
|
||||
<bind <ref {oid: "syndicate" key: #x""}> $ds #f>
|
||||
|
||||
? <service-object <daemon syndesizer> ?cap> [
|
||||
$cap <json-stdio-translator {
|
||||
argv: [
|
||||
"yt-dlp"
|
||||
"--dump-json"
|
||||
"https://youtu.be/RR9GkEXDvog"
|
||||
]
|
||||
dataspace: $ds
|
||||
}>
|
||||
]
|
||||
```
|
||||
|
||||
### Pulse proxy
|
||||
|
||||
A proxy actor that passes assertions and messages to a configured capability but only asserts observations on a periodic pulse.
|
||||
This can be used to implement polling behavior.
|
||||
|
||||
```
|
||||
# Example config
|
||||
let ?ds = dataspace
|
||||
|
||||
<require-service <daemon syndesizer>>
|
||||
? <service-object <daemon syndesizer> ?cap> [
|
||||
$cap <pulse {dataspace: $ds}>
|
||||
]
|
||||
|
||||
$ds ? <pulse 3600.0 ?proxy> [
|
||||
$proxy ? <assertion-updated-hourly ?value> [
|
||||
$log ! <log "-" {assertion-updated-hourly: $value}>
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
### SQLite
|
||||
|
||||
Readonly access to SQLite databases. Asserts rows as records in response to SQL query assertions. Dynamic updates are not implemented.
|
||||
|
||||
Can be disabled by passing `--define:withSqlite=no` to the Nim compiler.
|
||||
|
||||
```
|
||||
# Configuration example
|
||||
<require-service <daemon syndesizer>>
|
||||
|
||||
let ?sqlspace = dataspace
|
||||
|
||||
? <service-object <daemon syndesizer> ?cap> [
|
||||
$cap <sqlite {
|
||||
dataspace: $sqlspace
|
||||
database: "/var/db/example.db"
|
||||
}>
|
||||
]
|
||||
|
||||
let ?tuplespace = dataspace
|
||||
|
||||
$sqlspace <query "SELECT id, name FROM stuff" $tuplespace>
|
||||
|
||||
$tuplespace [
|
||||
? [?id ?name] [
|
||||
$log ! <log "-" { row: <example-row $id $name> }>
|
||||
]
|
||||
? <sqlite-error ?msg ?ctx> [
|
||||
$log ! <log "-" { msg: $msg ctx: $ctx }>
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
### XML translator
|
||||
|
||||
Translates between Preserves and XML according to the [Conventions for Common Data Types](https://preserves.dev/conventions.html).
|
||||
|
||||
Examples:
|
||||
- `<xml-translation "<foo a=\"1\"> <bar>hello world!</bar></foo>" <foo {"a": 1}<bar "hello world!">>>`
|
||||
- `<xml-translation "" [#t #f]>`
|
||||
- `<xml-translation "<<</>>" #f>`
|
||||
|
||||
```
|
||||
# Configuration example
|
||||
? <sharedspace ?ds> [
|
||||
$ds ? <Observe <rec xml-translation _> _> $config [
|
||||
$config <require-service <daemon syndesizer>>
|
||||
$config ? <service-object <daemon syndesizer> ?cap> [
|
||||
$cap <xml-translator { dataspace: $ds }>
|
||||
]
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## esc-printer-driver
|
||||
|
||||
A basic [ESC/P](https://en.wikipedia.org/wiki/ESC/P) printer driver.
|
||||
|
||||
Takes a path to a printer device file as a command line argument.
|
||||
The driver speaks the gatekeeper protocol and responds to the `<print {}>` step with a capability that prints strings it receives as messages.
|
||||
While the `<bold>` or `<italic>` is asserted to this entity the printer will go into the corresponding font mode (if the printer supports it).
|
||||
|
||||
Sample Syndicate server script:
|
||||
```
|
||||
<require-service <daemon printer>>
|
||||
|
||||
let ?printer-resolver = dataspace
|
||||
$printer-resolver ? <accepted ?printer> [
|
||||
$printer <italic>
|
||||
$printer ! "printer resolved\r\n"
|
||||
]
|
||||
|
||||
? <service-object <daemon printer> ?cap> [
|
||||
$cap <resolve <printer {}> $printer-resolver>
|
||||
$log ! <log "-" { line: "printer started"}>
|
||||
]
|
||||
|
||||
<daemon printer {
|
||||
argv: [ "/bin/esc-printer-driver" "/dev/usb/lp0"]
|
||||
protocol: application/syndicate
|
||||
clearEnv: #t
|
||||
}>
|
||||
```
|
||||
|
||||
## http_client
|
||||
|
||||
The inverse of `http-driver`.
|
||||
|
||||
### Caveats
|
||||
- HTTPS is assumed unless the request is to port 80.
|
||||
- If the request or response sets `Content-Type` to `application/json` or `…/preserves`
|
||||
the body will be a parsed Preserves value.
|
||||
- No cache support.
|
||||
- Internal errors propagate using a `400 Internal client error` response.
|
||||
|
||||
Sample Syndicate server script:
|
||||
```
|
||||
# A top-level dataspace
|
||||
let ?ds = dataspace
|
||||
|
||||
# A dataspace for handling the HTTP response.
|
||||
let ?response = dataspace
|
||||
$response [
|
||||
?? <done { "code": "EUR" "exchange_middle": ?middle } > [
|
||||
$ds <exchange EUR RSD $middle>
|
||||
]
|
||||
]
|
||||
|
||||
$ds [
|
||||
<request
|
||||
# Request Euro to Dinar exchange rate.
|
||||
<http-request 0 "kurs.resenje.org" 443
|
||||
get ["api" "v1" "currencies" "eur" "rates" "today"]
|
||||
{Content-Type: "application/json"} {} #f
|
||||
>
|
||||
$response
|
||||
>
|
||||
|
||||
# Log all assertions.
|
||||
? ?any [
|
||||
$log ! <log "-" { assertion: $any }>
|
||||
]
|
||||
]
|
||||
|
||||
? <service-object <daemon http-client> ?cap> [
|
||||
$cap <http-client {
|
||||
dataspace: $ds
|
||||
}>
|
||||
]
|
||||
|
||||
<require-service <daemon http-client>>
|
||||
|
||||
? <built http-client ?path ?sum> [
|
||||
<daemon http-client {
|
||||
argv: [ "/bin/http_client" ]
|
||||
clearEnv: #t
|
||||
protocol: application/syndicate
|
||||
}>
|
||||
]
|
||||
```
|
||||
|
||||
## mintsturdyref
|
||||
|
||||
A utility for minting [Sturdyrefs](https://synit.org/book/operation/builtin/gatekeeper.html#sturdyrefs).
|
||||
|
||||
## mount_actor
|
||||
|
||||
Actor for mounting filesystems on Linux.
|
||||
|
||||
Sample Syndicate server script:
|
||||
```
|
||||
# Assert a file-system we want to mount.
|
||||
<mount "/dev/sda3" "/boot" "vfat">
|
||||
|
||||
# Transform mount assertions into mount status observations.
|
||||
? <mount ?source ?target ?fs> [
|
||||
? <mount $source $target $fs _> [ ]
|
||||
]
|
||||
|
||||
# Assert mounting succeeded.
|
||||
? <mount _ ?target _ #t> [
|
||||
<service-state <mountpoint $target> ready>
|
||||
]
|
||||
# Assert mount failed.
|
||||
? <mount _ ?target _ <failure _>> [
|
||||
<service-state <mountpoint $target> failed>
|
||||
]
|
||||
|
||||
# Assert the details into the machine dataspace.
|
||||
? <machine-dataspace ?machine> [
|
||||
$config ? <mount ?source ?target ?fs ?status> [
|
||||
$machine <mount $source $target $fs $status>
|
||||
]
|
||||
]
|
||||
|
||||
# Require the mount_actor daemon.
|
||||
<require-service <daemon mount_actor>>
|
||||
<daemon mount_actor {
|
||||
argv: ["/home/emery/src/bin/mount_actor"]
|
||||
protocol: application/syndicate
|
||||
}>
|
||||
|
||||
# Pass the daemon the config dataspace.
|
||||
? <service-object <daemon mount_actor> ?cap> [
|
||||
$cap { dataspace: $config }
|
||||
]
|
||||
```
|
||||
|
||||
## msg
|
||||
|
||||
A utility that parses its command-line arguments as Preserves and sends them as messages to `$SYNDICATE_ROUTE`.
|
||||
When called as `assert` (by a symlink or a rename) it will make assertions instead.
|
||||
|
||||
## PostgreSQL
|
||||
|
||||
Readonly access to PostgreSQL databases.
|
||||
Asserts rows as records in response to SQL query assertions.
|
||||
Dynamic updates are not implemented.
|
||||
|
||||
```
|
||||
let ?postgreStep = <postgre {connection: [["host" "db.example.com"] ["dbname" "example"] ["user" "hackme"]]}>
|
||||
|
||||
let ?tuplespace = dataspace
|
||||
$tuplespace ? ?row [
|
||||
$log ! <log "-" { line: $row }>
|
||||
]
|
||||
|
||||
let ?resolver = dataspace
|
||||
$resolver ? <accepted ?sqlspace> [
|
||||
$sqlspace ? <sql-error ?msg ?context> [
|
||||
$log ! <log "-" { line: $msg context: $context }>
|
||||
]
|
||||
$sqlspace <query [SELECT firstname FROM users] $tuplespace>
|
||||
]
|
||||
|
||||
<require-service <daemon postgre-actor>>
|
||||
$config ? <service-object <daemon postgre-actor> ?cap> [
|
||||
$cap <resolve $postgreStep $resolver>
|
||||
]
|
||||
|
||||
<daemon postgre-actor {
|
||||
argv: [ "/bin/postgre-actor" ]
|
||||
clearEnv: #t
|
||||
protocol: application/syndicate
|
||||
}>
|
||||
|
||||
```
|
||||
|
||||
## preserve_process_environment
|
||||
|
||||
This utility serializes its process environment to Preserves and prints it to stdout.
|
||||
It can be used to feed the environment variables of a nested child of the Syndicate server back to the server. For example, to retrieve the environment variables that a desktop manager passed on to its children.
|
||||
|
||||
## SQLite
|
||||
|
||||
Readonly access to SQLite databases.
|
||||
Asserts rows as records in response to SQL query assertions.
|
||||
Dynamic updates are not implemented.
|
||||
|
||||
```
|
||||
# Configuration example
|
||||
let ?sqliteStep = <sqlite { database: "/var/db/stuff.db" }>
|
||||
|
||||
let ?tuplespace = dataspace
|
||||
$tuplespace ? ?row [
|
||||
$log ! <log "-" { line: $row }>
|
||||
]
|
||||
|
||||
let ?resolver = dataspace
|
||||
$resolver [
|
||||
? <rejected ?detail> [
|
||||
$log ! <log "-" { line: $detail }>
|
||||
]
|
||||
? <accepted ?sqlspace> [
|
||||
$log ! <log "-" { sqlspace: $sqlspace }>
|
||||
$sqlspace ? <sql-error ?msg ?context> [
|
||||
$log ! <log "-" { line: $msg context: $context }>
|
||||
]
|
||||
$sqlspace <query [ SELECT local_display_name FROM contacts ] $tuplespace>
|
||||
]
|
||||
]
|
||||
|
||||
<require-service <daemon sqlite-actor>>
|
||||
$config ? <service-object <daemon sqlite-actor> ?cap> [
|
||||
$cap <resolve $sqliteStep $resolver>
|
||||
]
|
||||
|
||||
<daemon sqlite-actor {
|
||||
argv: [ "/bin/sqlite-actor" ]
|
||||
clearEnv: #t
|
||||
protocol: application/syndicate
|
||||
}>
|
||||
```
|
||||
|
||||
## syndump
|
||||
|
||||
Utility for printing assertions and messages. Parses the command-line arguments as a pattern, connects a dataspace via `$SYNDICATE_ROUTE`, and writes observations to standard-output. Published assertions are prefixed by the `+` character, retractions by `-`, and messages by `!`.
|
||||
|
||||
Example
|
||||
```sh
|
||||
# Print patterns in use, filter down with AWK to only the published patterns.
|
||||
$ FS=':' syndump '<Observe ? _>' | awk -F : '/^+/ { print $2 }'
|
||||
```
|
||||
|
||||
## XSLT processor
|
||||
|
||||
Perform XML stylesheet transformations. For a given textual XSLT stylesheet and a textual XML document generate an abstract XML document in Preserves form. Inputs may be XML text or paths to XML files.
|
||||
|
||||
```
|
||||
# Configuration example
|
||||
let ?ds = dataspace
|
||||
$ds [
|
||||
? <xslt-transform "/stylesheet.xls" "/doc.xml" ?output> [
|
||||
? <xml-translation ?text $output> [
|
||||
$log ! <log "-" { xslt-output: $text }>
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
<require-service <daemon xslt_actor>>
|
||||
? <service-object <daemon xslt_actor> ?cap> $cap [
|
||||
<xml-translator { dataspace: $ds }>
|
||||
<xslt { dataspace: $ds }>
|
||||
]
|
||||
```
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
include_rules
|
||||
|
||||
: sbom.json |> !sbom-to-nix |> | ./<lock>
|
||||
run ./Tuprules.jq sbom.json
|
||||
|
||||
: foreach {bin} |> !assert_built |>
|
||||
: &(BIN_DIR)/msg |> !symlink |> &(BIN_DIR)/beep
|
||||
: &(BIN_DIR)/msg |> !symlink |> &(BIN_DIR)/assert
|
|
@ -0,0 +1,12 @@
|
|||
#! /usr/bin/env -S jq --raw-output --from-file
|
||||
.metadata.component.properties as $props |
|
||||
$props |
|
||||
( map( select(.name | .[0:10] == "nim:binDir") ) +
|
||||
map( select(.name | .[0:10] == "nim:srcDir") ) |
|
||||
map( .value )
|
||||
) + ["."] | .[0] as $binDir |
|
||||
|
||||
$props |
|
||||
map( select(.name | .[0:8] == "nim:bin:") ) |
|
||||
map( ": \($binDir)/\(.value).nim |> !nim_bin |> &(BIN_DIR)/\(.name[8:]) {bin}" ) |
|
||||
join("\n")
|
|
@ -0,0 +1,7 @@
|
|||
include ../syndicate-nim/depends.tup
|
||||
PROJECT_DIR = $(TUP_CWD)
|
||||
|
||||
NIM = $(DIRENV) $(NIM)
|
||||
NIM_GROUPS += $(SYNDICATE_PROTOCOL)
|
||||
NIM_GROUPS += $(PROJECT_DIR)/<lock>
|
||||
NIM_GROUPS += $(PROJECT_DIR)/<schema>
|
|
@ -0,0 +1,14 @@
|
|||
version 1.
|
||||
|
||||
FileSystemUsage = <file-system-usage @path string @size int>.
|
||||
|
||||
# This assertion publishes a dataspace that proxies assertions with
|
||||
# an exception for <Observe …> which is pulsed every periodSec.
|
||||
# The pulse resolution is no more than one millisecond.
|
||||
Pulse = <pulse @periodSec float @proxy #:any>.
|
||||
|
||||
XmlTranslation = <xml-translation @xml string @pr any>.
|
||||
|
||||
XsltTransform = <xslt-transform @stylesheet string @input string @output any>.
|
||||
XsltItems = [XsltItem ...].
|
||||
XsltItem = string.
|
|
@ -0,0 +1,4 @@
|
|||
version 1.
|
||||
|
||||
Base64Text = <base64 @txt string @bin bytes> .
|
||||
Base64File = <base64-file @txt string @path string @size int> .
|
|
@ -0,0 +1,196 @@
|
|||
{
|
||||
lib,
|
||||
stdenv,
|
||||
fetchgit,
|
||||
fetchzip,
|
||||
runCommand,
|
||||
xorg,
|
||||
nim,
|
||||
nimOverrides,
|
||||
}:
|
||||
|
||||
let
|
||||
fetchers = {
|
||||
fetchzip =
|
||||
{ url, sha256, ... }:
|
||||
fetchzip {
|
||||
name = "source";
|
||||
inherit url sha256;
|
||||
};
|
||||
fetchgit =
|
||||
{
|
||||
fetchSubmodules ? false,
|
||||
leaveDotGit ? false,
|
||||
rev,
|
||||
sha256,
|
||||
url,
|
||||
...
|
||||
}:
|
||||
fetchgit {
|
||||
inherit
|
||||
fetchSubmodules
|
||||
leaveDotGit
|
||||
rev
|
||||
sha256
|
||||
url
|
||||
;
|
||||
};
|
||||
};
|
||||
|
||||
filterPropertiesToAttrs =
|
||||
prefix: properties:
|
||||
lib.pipe properties [
|
||||
(builtins.filter ({ name, ... }: (lib.strings.hasPrefix prefix name)))
|
||||
(map (
|
||||
{ name, value }:
|
||||
{
|
||||
name = lib.strings.removePrefix prefix name;
|
||||
inherit value;
|
||||
}
|
||||
))
|
||||
builtins.listToAttrs
|
||||
];
|
||||
|
||||
buildNimCfg =
|
||||
{ backend, components, ... }:
|
||||
let
|
||||
componentSrcDirs = map (
|
||||
{ properties, ... }:
|
||||
let
|
||||
fodProps = filterPropertiesToAttrs "nix:fod:" properties;
|
||||
fod = fetchers.${fodProps.method} fodProps;
|
||||
srcDir = fodProps.srcDir or "";
|
||||
in
|
||||
if srcDir == "" then fod else "${fod}/${srcDir}"
|
||||
) components;
|
||||
in
|
||||
runCommand "nim.cfg"
|
||||
{
|
||||
outputs = [
|
||||
"out"
|
||||
"src"
|
||||
];
|
||||
nativeBuildInputs = [ xorg.lndir ];
|
||||
}
|
||||
''
|
||||
pkgDir=$src/pkg
|
||||
cat << EOF >> $out
|
||||
backend:${backend}
|
||||
path:"$src"
|
||||
path:"$pkgDir"
|
||||
EOF
|
||||
mkdir -p "$pkgDir"
|
||||
${lib.strings.concatMapStrings (d: ''
|
||||
lndir "${d}" "$pkgDir"
|
||||
'') componentSrcDirs}
|
||||
'';
|
||||
|
||||
buildCommands = lib.attrsets.mapAttrsToList (
|
||||
output: input: ''
|
||||
nim compile $nimFlags --out:${output} ${input}
|
||||
''
|
||||
);
|
||||
|
||||
installCommands = lib.attrsets.mapAttrsToList (
|
||||
output: input: ''
|
||||
install -Dt $out/bin ${output}
|
||||
''
|
||||
);
|
||||
in
|
||||
|
||||
callerArg: sbomArg:
|
||||
|
||||
let
|
||||
applySbom =
|
||||
{
|
||||
passthru ? { },
|
||||
...
|
||||
}@prevAttrs:
|
||||
let
|
||||
sbom = lib.attrsets.recursiveUpdate (
|
||||
if builtins.isAttrs sbomArg then sbomArg else builtins.fromJSON (builtins.readFile sbomArg)
|
||||
) passthru.sbom or { };
|
||||
|
||||
properties = # SBOM metadata.component.properties as an attrset.
|
||||
lib.attrsets.recursiveUpdate (builtins.listToAttrs sbom.metadata.component.properties)
|
||||
passthru.properties or { };
|
||||
|
||||
nimBin = # A mapping of Nim module file paths to names of programs.
|
||||
lib.attrsets.recursiveUpdate (lib.pipe properties [
|
||||
(lib.attrsets.filterAttrs (name: value: lib.strings.hasPrefix "nim:bin:" name))
|
||||
(lib.attrsets.mapAttrs' (
|
||||
name: value: {
|
||||
name = lib.strings.removePrefix "nim:bin:" name;
|
||||
value = "${properties."nim:binDir" or (properties."nim:srcDir" or ".")}/${value}";
|
||||
}
|
||||
))
|
||||
]) passthru.nimBin or { };
|
||||
in
|
||||
{
|
||||
strictDeps = true;
|
||||
|
||||
pname = prevAttrs.pname or sbom.metadata.component.name;
|
||||
version = prevAttrs.version or sbom.metadata.component.version or null;
|
||||
|
||||
configurePhase =
|
||||
prevAttrs.configurePhase or ''
|
||||
runHook preConfigure
|
||||
echo "nim.cfg << $nimCfg"
|
||||
cat $nimCfg >> nim.cfg
|
||||
cat << EOF >> nim.cfg
|
||||
nimcache:"$NIX_BUILD_TOP/nimcache"
|
||||
parallelBuild:$NIX_BUILD_CORES
|
||||
EOF
|
||||
runHook postConfigure
|
||||
'';
|
||||
|
||||
buildPhase =
|
||||
prevAttrs.buildPhase or ''
|
||||
runHook preBuild
|
||||
${lib.strings.concatLines (buildCommands nimBin)}
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase =
|
||||
prevAttrs.installPhase or ''
|
||||
runHook preInstall
|
||||
${lib.strings.concatLines (installCommands nimBin)}
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
nativeBuildInputs = (prevAttrs.nativeBuildInputs or [ ]) ++ [ nim ];
|
||||
|
||||
nimCfg =
|
||||
prevAttrs.nimCfg or (buildNimCfg {
|
||||
backend = prevAttrs.nimBackend or properties."nim:backend" or "c";
|
||||
inherit (sbom) components;
|
||||
});
|
||||
|
||||
passthru = {
|
||||
inherit sbom properties nimBin;
|
||||
};
|
||||
};
|
||||
|
||||
applyOverrides =
|
||||
prevAttrs:
|
||||
builtins.foldl' (
|
||||
prevAttrs:
|
||||
{ name, ... }@component:
|
||||
if (builtins.hasAttr name nimOverrides) then
|
||||
prevAttrs // (nimOverrides.${name} component prevAttrs)
|
||||
else
|
||||
prevAttrs
|
||||
) prevAttrs prevAttrs.passthru.sbom.components;
|
||||
|
||||
composition =
|
||||
finalAttrs:
|
||||
let
|
||||
callerAttrs = if builtins.isAttrs callerArg then callerArg else callerArg finalAttrs;
|
||||
sbomAttrs = callerAttrs // (applySbom callerAttrs);
|
||||
overrideAttrs = sbomAttrs // (applyOverrides sbomAttrs);
|
||||
in
|
||||
overrideAttrs;
|
||||
in
|
||||
stdenv.mkDerivation composition
|
||||
|
||||
# TODO: Add an overrideSbom function into the result..
|
|
@ -0,0 +1,61 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
Base64DecoderArguments = <base64-decoder {
|
||||
dataspace: #:any
|
||||
}>.
|
||||
|
||||
CacheArguments = <cache {
|
||||
dataspace: #:any
|
||||
lifetime: float
|
||||
}>.
|
||||
|
||||
FileSystemUsageArguments = <file-system-usage {
|
||||
dataspace: #:any
|
||||
}>.
|
||||
|
||||
JsonTranslatorArguments = <json-stdio-translator {
|
||||
argv: [string ...]
|
||||
dataspace: #:any
|
||||
}>.
|
||||
|
||||
TcpAddress = <tcp @host string @port int>.
|
||||
UnixAddress = <unix @path string>.
|
||||
|
||||
SocketAddress = TcpAddress / UnixAddress .
|
||||
|
||||
HttpClientArguments = <http-client {
|
||||
dataspace: #:any
|
||||
}>.
|
||||
|
||||
HttpDriverStep= <http-driver { }>.
|
||||
|
||||
JsonSocketTranslatorStep = <json-socket-translator {
|
||||
socket: SocketAddress
|
||||
}>.
|
||||
|
||||
PostgreStep = <postgre {
|
||||
connection: [PostgreConnectionParameter ...]
|
||||
}>.
|
||||
PostgreConnectionParameter = [@key string @val string].
|
||||
|
||||
PrinterStep = <printer {}> .
|
||||
|
||||
PulseArguments = <pulse {
|
||||
dataspace: #:any
|
||||
}>.
|
||||
|
||||
SqliteStep = <sqlite {
|
||||
database: string
|
||||
}>.
|
||||
|
||||
XmlTranslatorArguments = <xml-translator {
|
||||
dataspace: #:any
|
||||
}>.
|
||||
|
||||
XsltArguments = <xslt {
|
||||
dataspace: #:any
|
||||
}>.
|
||||
|
||||
# Reused from syndicate-protocols/transportAddress
|
||||
Tcp = <tcp @host string @port int>.
|
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
pkgs ? import <nixpkgs> { },
|
||||
}:
|
||||
with pkgs;
|
||||
let
|
||||
buildNimSbom = pkgs.callPackage ./build-nim-sbom.nix { };
|
||||
in
|
||||
buildNimSbom (finalAttrs: {
|
||||
src = if lib.inNixShell then null else lib.cleanSource ./.;
|
||||
buildInputs = [
|
||||
postgresql.out
|
||||
sqlite
|
||||
libxml2
|
||||
libxslt
|
||||
openssl
|
||||
];
|
||||
}) ./sbom.json
|
23
exec.prs
23
exec.prs
|
@ -1,23 +0,0 @@
|
|||
; Copied from ../syndicate-rs/syndicate-server/protocols/schemas/externalServices.prs
|
||||
version 1 .
|
||||
|
||||
Exec = <exec @argv CommandLine @restartPolicy RestartPolicy> .
|
||||
|
||||
CommandLine = @shell string / @full FullCommandLine .
|
||||
FullCommandLine = [@program string @args string ...] .
|
||||
|
||||
RestartPolicy =
|
||||
/ ; Whether the process terminates normally or abnormally, restart it
|
||||
; without affecting any peer processes within the service.
|
||||
=always
|
||||
/ ; If the process terminates normally, leave everything alone; if it
|
||||
; terminates abnormally, restart it without affecting peers.
|
||||
@onError =on-error
|
||||
/ ; If the process terminates normally, leave everything alone; if it
|
||||
; terminates abnormally, restart the whole daemon (all processes
|
||||
; within the daemon).
|
||||
=all
|
||||
/ ; Treat both normal and abnormal termination as normal termination; that is, never restart,
|
||||
; and enter state "complete" even if the process fails.
|
||||
=never
|
||||
.
|
|
@ -0,0 +1,3 @@
|
|||
version 1 .
|
||||
|
||||
InotifyMessage = <inotify @path string @event symbol @cookie int @name string> .
|
|
@ -0,0 +1,137 @@
|
|||
{
|
||||
"depends": [
|
||||
{
|
||||
"date": "2024-05-23T17:44:14+03:00",
|
||||
"deepClone": false,
|
||||
"fetchLFS": false,
|
||||
"fetchSubmodules": true,
|
||||
"hash": "sha256-qTRhHsOPNov1BQcm3P7NEkEPW6uh80XFfQRBdMp4o0Q=",
|
||||
"leaveDotGit": false,
|
||||
"method": "git",
|
||||
"packages": [
|
||||
"syndicate"
|
||||
],
|
||||
"path": "/nix/store/1lcxrap5n80hy1z4bcmsmdx83n4b9wjf-syndicate-nim",
|
||||
"rev": "7ab4611824b676157523f2618e7893d5ac99e4f2",
|
||||
"sha256": "0i53g3578h84gp2lbwx1mddhyh8jrpzdq9h70psqndlgqcg62d59",
|
||||
"srcDir": "src",
|
||||
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim.git"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"bigints"
|
||||
],
|
||||
"path": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source",
|
||||
"rev": "86ea14d31eea9275e1408ca34e6bfe9c99989a96",
|
||||
"sha256": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4",
|
||||
"srcDir": "src",
|
||||
"url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"cps"
|
||||
],
|
||||
"path": "/nix/store/8gbhwni0akqskdb3qhn5nfgv6gkdz0vz-source",
|
||||
"rev": "c90530ac57f98a842b7be969115c6ef08bdcc564",
|
||||
"sha256": "0h8ghs2fqg68j3jdcg7grnxssmllmgg99kym2w0a3vlwca1zvr62",
|
||||
"srcDir": "",
|
||||
"url": "https://github.com/ehmry/cps/archive/c90530ac57f98a842b7be969115c6ef08bdcc564.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"getdns"
|
||||
],
|
||||
"path": "/nix/store/x9xmn7w4k6jg8nv5bnx148ibhnsfh362-source",
|
||||
"rev": "c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6",
|
||||
"sha256": "1sbgx2x51szr22i72n7c8jglnfmr8m7y7ga0v85d58fwadiv7g6b",
|
||||
"srcDir": "src",
|
||||
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/c73cbe288d9f9480586b8fa87f6d794ffb6a6ce6.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"nimcrypto"
|
||||
],
|
||||
"path": "/nix/store/fkrcpp8lzj2yi21na79xm63xk0ggnqsp-source",
|
||||
"rev": "485f7b3cfa83c1beecc0e31be0e964d697aa74d7",
|
||||
"sha256": "1h3dzdbc9kacwpi10mj73yjglvn7kbizj1x8qc9099ax091cj5xn",
|
||||
"srcDir": "",
|
||||
"url": "https://github.com/cheatfate/nimcrypto/archive/485f7b3cfa83c1beecc0e31be0e964d697aa74d7.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"npeg"
|
||||
],
|
||||
"path": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source",
|
||||
"rev": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d",
|
||||
"sha256": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg",
|
||||
"srcDir": "src",
|
||||
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"preserves"
|
||||
],
|
||||
"path": "/nix/store/9zl4s2did00725n8ygbp37agvkskdhcx-source",
|
||||
"rev": "1fee87590940761e288cf9ab3c7270832403b719",
|
||||
"sha256": "1ny42rwr3yx52zwvkdg4lh54nxaxrmxdj9dlw3qarvvp2grfq4j2",
|
||||
"srcDir": "src",
|
||||
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/1fee87590940761e288cf9ab3c7270832403b719.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"stew"
|
||||
],
|
||||
"path": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source",
|
||||
"rev": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
|
||||
"sha256": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w",
|
||||
"srcDir": "",
|
||||
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"sys"
|
||||
],
|
||||
"path": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source",
|
||||
"rev": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
|
||||
"sha256": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q",
|
||||
"srcDir": "src",
|
||||
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
|
||||
},
|
||||
{
|
||||
"method": "fetchzip",
|
||||
"packages": [
|
||||
"taps"
|
||||
],
|
||||
"path": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source",
|
||||
"rev": "8c8572cd971d1283e6621006b310993c632da247",
|
||||
"sha256": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8",
|
||||
"srcDir": "src",
|
||||
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
|
||||
},
|
||||
{
|
||||
"date": "2024-05-22T06:09:38+02:00",
|
||||
"deepClone": false,
|
||||
"fetchLFS": false,
|
||||
"fetchSubmodules": true,
|
||||
"hash": "sha256-B3fMwgBpO2Ty8143k9V1cnHXa5K8i1+zN+eF/rBLMe0=",
|
||||
"leaveDotGit": false,
|
||||
"method": "git",
|
||||
"packages": [
|
||||
"solo5_dispatcher"
|
||||
],
|
||||
"path": "/nix/store/xqj48v4rqlffl1l94hi02szazj5gla8g-solo5_dispatcher",
|
||||
"rev": "cc64ef99416b22b12e4a076d33de9e25a163e57d",
|
||||
"sha256": "1v9i9fqgx1g76yrmz2xwj9mxfwbjfpar6dsyygr68fv9031cqxq7",
|
||||
"srcDir": "pkg",
|
||||
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
version 1.
|
||||
|
||||
Mountpoint = <mount @source string @target string @type string @status Status> .
|
||||
Status = Failure / @success #t .
|
||||
Failure = <failure @msg string> .
|
|
@ -0,0 +1,56 @@
|
|||
let ?socketPath = "/run/user/1000/mpv.sock"
|
||||
|
||||
let ?mpvSpace = dataspace
|
||||
$mpvSpace [
|
||||
; announce the dataspace when the translator is connected
|
||||
? <connected $socketPath> [
|
||||
$config <mpv $mpvSpace>
|
||||
]
|
||||
|
||||
; translate <play-file …> to an MPV command
|
||||
?? <play-file ?file> [
|
||||
$log ! <log "-" { line: <play-file $file> }>
|
||||
! <send-json { "command": ["loadfile" $file "append-play"] }>
|
||||
]
|
||||
|
||||
; log anything that comes back from MPV
|
||||
; ?? <recv-json ?js> [ $log ! <log "-" { mpv: $js }> ]
|
||||
|
||||
; clear the playlist on idle so it doesn't grow indefinitely
|
||||
?? <recv-json {"event": "idle"}> [
|
||||
! <send-json { "command": ["playlist-clear"] }>
|
||||
]
|
||||
]
|
||||
|
||||
; need the translator and the translator needs the daemon
|
||||
<require-service <daemon mpv-translator>>
|
||||
<depends-on <daemon mpv-translator> <service-state <daemon mpv-server> ready>>
|
||||
|
||||
? <service-object <daemon mpv-translator> ?cap> [
|
||||
$cap {
|
||||
dataspace: $mpvSpace
|
||||
socket: $socketPath
|
||||
}
|
||||
]
|
||||
|
||||
; assert and retract the daemon as the daemon is built (this is a testing artifact)
|
||||
? <built json_socket_translator ?path ?sum> [
|
||||
|
||||
<daemon mpv-translator {
|
||||
argv: [$path]
|
||||
protocol: application/syndicate
|
||||
env: {BUILD_SUM: $sum}
|
||||
}>
|
||||
]
|
||||
|
||||
; start mpv regardless
|
||||
<daemon mpv-server {
|
||||
argv: [
|
||||
"/run/current-system/sw/bin/mpv"
|
||||
"--really-quiet"
|
||||
"--idle=yes"
|
||||
"--no-audio-display"
|
||||
"--input-ipc-server=/run/user/1000/mpv.sock"
|
||||
]
|
||||
protocol: none
|
||||
}>
|
|
@ -0,0 +1,2 @@
|
|||
version 1.
|
||||
RoundTripTime = <rtt @address string @minimum float @average float @maximum float>.
|
|
@ -1,7 +0,0 @@
|
|||
version 1 .
|
||||
|
||||
XdgOpen = <xdg-open @uris [string ...]> .
|
||||
|
||||
UriRunnerConfig = ListenOn / ActionHandler .
|
||||
ListenOn = <listen-on @dataspace #!any> .
|
||||
ActionHandler = <action-handler @pat string @cmd [string ...]> .
|
|
@ -0,0 +1,5 @@
|
|||
version 1.
|
||||
|
||||
Environment = { symbol: string ...:... } .
|
||||
Select = <rofi-select @option string @environment Environment> .
|
||||
Options = <rofi-options @options [string ...]> .
|
|
@ -0,0 +1,653 @@
|
|||
{
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.6",
|
||||
"metadata": {
|
||||
"component": {
|
||||
"type": "application",
|
||||
"bom-ref": "pkg:nim/syndicate_utils",
|
||||
"name": "syndicate_utils",
|
||||
"description": "Utilites for Syndicated Actors and Synit",
|
||||
"version": "20240610",
|
||||
"authors": [
|
||||
{
|
||||
"name": "Emery Hemingway"
|
||||
}
|
||||
],
|
||||
"licenses": [
|
||||
{
|
||||
"license": {
|
||||
"id": "Unlicense"
|
||||
}
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nim:skipExt",
|
||||
"value": "nim"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:postgre-actor",
|
||||
"value": "postgre_actor"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:xslt-actor",
|
||||
"value": "xslt_actor"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:preserve-process-environment",
|
||||
"value": "preserve_process_environment"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:mintsturdyref",
|
||||
"value": "mintsturdyref"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:esc-printer-driver",
|
||||
"value": "esc_printer_driver"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:msg",
|
||||
"value": "msg"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:rofi-script-actor",
|
||||
"value": "rofi_script_actor"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:syndesizer",
|
||||
"value": "syndesizer"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:http-client",
|
||||
"value": "http_client"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:mount-actor",
|
||||
"value": "mount_actor"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:syndump",
|
||||
"value": "syndump"
|
||||
},
|
||||
{
|
||||
"name": "nim:bin:sqlite-actor",
|
||||
"value": "sqlite_actor"
|
||||
},
|
||||
{
|
||||
"name": "nim:srcDir",
|
||||
"value": "src"
|
||||
},
|
||||
{
|
||||
"name": "nim:backend",
|
||||
"value": "c"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"components": [
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/syndicate",
|
||||
"name": "syndicate",
|
||||
"version": "20240610",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/7bbcdb7e7705c2ab54ba0165565813d67aea48b0.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.syndicate-lang.org/ehmry/syndicate-nim.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/mldff990wpr0v9v5qh6ggqjmc2mn3n8g-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "7bbcdb7e7705c2ab54ba0165565813d67aea48b0"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "0mb3mrj5dkkqm0xp5hg84c5naaci4mi6mv2jjznfi6i7swp3f7vs"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.syndicate-lang.org/ehmry/syndicate-nim/archive/7bbcdb7e7705c2ab54ba0165565813d67aea48b0.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20240610"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/preserves",
|
||||
"name": "preserves",
|
||||
"version": "20240610",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/560a6417a30a2dff63f24b62498e9fcac2de8354.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/0sszsmz84ppwqsgda8cmli4lfh2mjmin-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "560a6417a30a2dff63f24b62498e9fcac2de8354"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "19r983fy7m54mlaj0adxdp8pxi1x8dp6phkcnr8rz5y5cwndfjx2"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/560a6417a30a2dff63f24b62498e9fcac2de8354.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20240610"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:date",
|
||||
"value": "2024-05-23T15:58:40+03:00"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:hash",
|
||||
"value": "sha256-JvdvLdPajDgIPbLblO0LbOm0wEp530fs8LYmgH885sk="
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/sys",
|
||||
"name": "sys",
|
||||
"version": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-sys.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "4ef3b624db86e331ba334e705c1aa235d55b05e1"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/taps",
|
||||
"name": "taps",
|
||||
"version": "20240405",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/nim_taps",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "8c8572cd971d1283e6621006b310993c632da247"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20240405"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/nimcrypto",
|
||||
"name": "nimcrypto",
|
||||
"version": "traditional-api",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/cheatfate/nimcrypto",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/zyr8zwh7vaiycn1s4r8cxwc71f2k5l0h-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "602c5d20c69c76137201b5d41f788f72afb95aa8"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1dmdmgb6b9m5f8dyxk781nnd61dsk3hdxqks7idk9ncnpj9fng65"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/cheatfate/nimcrypto/archive/602c5d20c69c76137201b5d41f788f72afb95aa8.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "traditional-api"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/npeg",
|
||||
"name": "npeg",
|
||||
"version": "1.2.2",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/zevv/npeg.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "1.2.2"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/bigints",
|
||||
"name": "bigints",
|
||||
"version": "20231006",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-bigints.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "86ea14d31eea9275e1408ca34e6bfe9c99989a96"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20231006"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/cps",
|
||||
"name": "cps",
|
||||
"version": "0.10.4",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/nim-works/cps",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/m9vpcf3dq6z2h1xpi1vlw0ycxp91s5p7-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "2a4d771a715ba45cfba3a82fa625ae7ad6591c8b"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "0c62k5wpq9z9mn8cd4rm8jjc4z0xmnak4piyj5dsfbyj6sbdw2bf"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "0.10.4"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/stew",
|
||||
"name": "stew",
|
||||
"version": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/status-im/nim-stew",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "3c91b8694e15137a81ec7db37c6c58194ec94a6a"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/getdns",
|
||||
"name": "getdns",
|
||||
"version": "20230806",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/e4ae0992ed7c5540e6d498f3074d06c8f454a0b6.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/getdns-nim",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/j8i20k9aarzppg4p234449140nnnaycq-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "e4ae0992ed7c5540e6d498f3074d06c8f454a0b6"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1dp53gndr6d9s9601dd5ipkiq94j53hlx46mxv8gpr8nd98bqysg"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.sr.ht/~ehmry/getdns-nim/archive/e4ae0992ed7c5540e6d498f3074d06c8f454a0b6.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20230806"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/solo5_dispatcher",
|
||||
"name": "solo5_dispatcher",
|
||||
"version": "20240522",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/4jj467pg4hs6warhksb8nsxn9ykz8c7c-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "cc64ef99416b22b12e4a076d33de9e25a163e57d"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1v9i9fqgx1g76yrmz2xwj9mxfwbjfpar6dsyygr68fv9031cqxq7"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20240522"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "pkg"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"dependencies": [
|
||||
{
|
||||
"ref": "pkg:nim/syndicate_utils",
|
||||
"dependsOn": [
|
||||
"pkg:nim/syndicate"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/syndicate",
|
||||
"dependsOn": [
|
||||
"pkg:nim/nimcrypto",
|
||||
"pkg:nim/preserves",
|
||||
"pkg:nim/sys",
|
||||
"pkg:nim/taps"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/preserves",
|
||||
"dependsOn": [
|
||||
"pkg:nim/npeg",
|
||||
"pkg:nim/bigints"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/sys",
|
||||
"dependsOn": [
|
||||
"pkg:nim/cps",
|
||||
"pkg:nim/stew"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/taps",
|
||||
"dependsOn": [
|
||||
"pkg:nim/getdns",
|
||||
"pkg:nim/sys",
|
||||
"pkg:nim/cps",
|
||||
"pkg:nim/solo5_dispatcher"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/nimcrypto",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/npeg",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/bigints",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/cps",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/stew",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/getdns",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/solo5_dispatcher",
|
||||
"dependsOn": [
|
||||
"pkg:nim/cps"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
version 1 .
|
||||
|
||||
# When asserted the actor reponds to @target rows as records
|
||||
# of the given label and row columns as record fields.
|
||||
Query = <query @statement [any ...] @target #:any> .
|
||||
|
||||
# When a query fails this is asserted instead.
|
||||
SqlError = <sql-error @msg string @context string>.
|
|
@ -1,2 +0,0 @@
|
|||
include_rules
|
||||
: foreach ../*.prs |> !preserves_schema_nim |> %B.nim
|
|
@ -0,0 +1,111 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
## ESC/P printer control actor.
|
||||
|
||||
import
|
||||
std/[cmdline, oserrors, posix, sequtils, sets],
|
||||
pkg/sys/[files, ioqueue],
|
||||
preserves, preserves/sugar,
|
||||
syndicate, syndicate/relays,
|
||||
syndicate/protocols/[gatekeeper, sturdy],
|
||||
./private/esc_p
|
||||
|
||||
from pkg/sys/handles import FD
|
||||
|
||||
proc echo(args: varargs[string, `$`]) {.used.} =
|
||||
stderr.writeLine(args)
|
||||
|
||||
type
|
||||
HandleSet = HashSet[Handle]
|
||||
|
||||
Printer = ref object of Entity
|
||||
device: AsyncFile
|
||||
boldHandles, italicHandles, superscriptHandles, subscriptHandles: HandleSet
|
||||
buffer: seq[byte]
|
||||
isBusy: bool
|
||||
|
||||
proc flush(printer: Printer) {.asyncio.} =
|
||||
printer.isBusy = true
|
||||
while printer.buffer.len > 0:
|
||||
let n = printer.device.write(printer.buffer)
|
||||
if n > 0:
|
||||
printer.buffer.delete(0..<n)
|
||||
elif n < 0:
|
||||
osLastError().osErrorMsg().quit()
|
||||
printer.isBusy = false
|
||||
|
||||
proc write(printer: Printer; s: string) {.inline.} =
|
||||
printer.buffer.add cast[seq[byte]](s)
|
||||
if not printer.isBusy:
|
||||
discard trampoline:
|
||||
whelp printer.flush()
|
||||
|
||||
proc writeLine(printer: Printer; s: string) {.inline.} =
|
||||
printer.write(s)
|
||||
printer.write("\r\n")
|
||||
|
||||
method message(printer: Printer; t: Turn; a: AssertionRef) =
|
||||
if a.value.isString:
|
||||
printer.write(a.value.string)
|
||||
# TODO: unicode?
|
||||
# TODO: line breaks?
|
||||
|
||||
proc assert(printer: Printer; handles: var HandleSet; ctrl: string; h: Handle) =
|
||||
if handles.len == 0: printer.write(ctrl)
|
||||
handles.incl h
|
||||
|
||||
proc retract(printer: Printer; handles: var HandleSet; ctrl: string; h: Handle) =
|
||||
handles.excl h
|
||||
if handles.len == 0: printer.write(ctrl)
|
||||
|
||||
method publish(printer: Printer; t: Turn; a: AssertionRef; h: Handle) =
|
||||
if a.value.isRecord("bold"):
|
||||
printer.assert(printer.boldHandles, SelectBoldFont, h)
|
||||
|
||||
elif a.value.isRecord("italic"):
|
||||
printer.assert(printer.italicHandles, SelectItalicFont, h)
|
||||
|
||||
elif a.value.isRecord("superscript"):
|
||||
printer.assert(printer.superscriptHandles, SelectSuperScript, h)
|
||||
|
||||
elif a.value.isRecord("subscript"):
|
||||
printer.assert(printer.subscriptHandles, SelectSubScript, h)
|
||||
|
||||
method retract(printer: Printer; t: Turn; h: Handle) =
|
||||
if printer.boldHandles.contains h:
|
||||
printer.retract(printer.boldHandles, CancelBoldFont, h)
|
||||
|
||||
elif printer.italicHandles.contains h:
|
||||
printer.retract(printer.italicHandles, CanceItalicFont, h)
|
||||
|
||||
elif printer.superscriptHandles.contains h:
|
||||
printer.retract(printer.superscriptHandles, CancelAltScript, h)
|
||||
|
||||
elif printer.subscriptHandles.contains h:
|
||||
printer.retract(printer.subscriptHandles, CancelAltScript, h)
|
||||
|
||||
|
||||
proc devicePath: string =
|
||||
if paramCount() < 1:
|
||||
quit "missing path to printer device file"
|
||||
if paramCount() > 1:
|
||||
quit "too many command line parameters"
|
||||
paramStr(1)
|
||||
|
||||
proc openPrinter(turn: Turn): Printer =
|
||||
new result
|
||||
result.facet = turn.facet
|
||||
let fd = posix.open(devicePath(), O_WRONLY or O_NONBLOCK, 0)
|
||||
if fd < 0: osLastError().osErrorMsg().quit()
|
||||
result.device = newAsyncFile(FD fd)
|
||||
result.write(InitializePrinter)
|
||||
|
||||
runActor(devicePath()) do (turn: Turn):
|
||||
let printer = openPrinter(turn)
|
||||
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
|
||||
let pat = Resolve?:{0: matchRecord("printer"), 1: grab()}
|
||||
during(turn, relay, pat) do (cont: Cap):
|
||||
# Publish for any <printer> step.
|
||||
discard publish(turn, cont, ResolvedAccepted(
|
||||
responderSession: turn.newCap(printer)))
|
32
src/exec.nim
32
src/exec.nim
|
@ -1,32 +0,0 @@
|
|||
|
||||
import
|
||||
std/typetraits, preserves
|
||||
|
||||
type
|
||||
CommandLineKind* {.pure.} = enum
|
||||
`shell`, `full`
|
||||
CommandLineShell* = string
|
||||
`CommandLine`* {.preservesOr.} = object
|
||||
case orKind*: CommandLineKind
|
||||
of CommandLineKind.`shell`:
|
||||
`shell`*: CommandLineShell
|
||||
|
||||
of CommandLineKind.`full`:
|
||||
`full`*: FullCommandLine
|
||||
|
||||
|
||||
Exec* {.preservesRecord: "exec".} = object
|
||||
`argv`*: CommandLine
|
||||
`restartPolicy`*: RestartPolicy
|
||||
|
||||
`RestartPolicy`* {.preservesOr, pure.} = enum
|
||||
`always`, `onError`, `all`, `never`
|
||||
FullCommandLine* {.preservesTuple.} = object
|
||||
`program`*: string
|
||||
`args`* {.preservesTupleTail.}: seq[string]
|
||||
|
||||
proc `$`*(x: CommandLine | Exec | FullCommandLine): string =
|
||||
`$`(toPreserve(x))
|
||||
|
||||
proc encode*(x: CommandLine | Exec | FullCommandLine): seq[byte] =
|
||||
encode(toPreserve(x))
|
|
@ -0,0 +1,113 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
# TODO: write a TAPS HTTP client. Figure out how to externalise TLS.
|
||||
|
||||
import
|
||||
std/[httpclient, options, streams, strutils, tables, uri],
|
||||
pkg/taps,
|
||||
pkg/preserves,
|
||||
pkg/syndicate, pkg/syndicate/protocols/http,
|
||||
./schema/config
|
||||
|
||||
proc url(req: HttpRequest): Uri =
|
||||
result.scheme = if req.port == 80: "http" else: "https"
|
||||
result.hostname = req.host.present
|
||||
result.port = $req.port
|
||||
for i, p in req.path:
|
||||
if 0 < i: result.path.add '/'
|
||||
result.path.add p.encodeUrl
|
||||
for key, vals in req.query:
|
||||
if result.query.len > 0:
|
||||
result.query.add '&'
|
||||
result.query.add key.string.encodeUrl
|
||||
for i, val in vals:
|
||||
if i == 0: result.query.add '='
|
||||
elif i < vals.high: result.query.add ','
|
||||
result.query.add val.string.encodeUrl
|
||||
|
||||
proc toContent(body: Value; contentType: var string): string =
|
||||
case contentType
|
||||
of "application/json":
|
||||
var stream = newStringStream()
|
||||
writeText(stream, body, textJson)
|
||||
return stream.data.move
|
||||
of "application/preserves":
|
||||
return cast[string](body.encode)
|
||||
of "text/preserves":
|
||||
return $body
|
||||
else:
|
||||
discard
|
||||
|
||||
case body.kind
|
||||
of pkString:
|
||||
result = body.string
|
||||
if contentType == "":
|
||||
contentType = "text/plain"
|
||||
of pkByteString:
|
||||
result = cast[string](body.bytes)
|
||||
if contentType == "":
|
||||
contentType = "application/octet-stream"
|
||||
else:
|
||||
raise newException(ValueError, "unknown content type")
|
||||
|
||||
proc spawnHttpClient*(turn: Turn; root: Cap): Actor {.discardable.} =
|
||||
|
||||
during(turn, root, ?:HttpClientArguments) do (ds: Cap):
|
||||
spawn("http-client", turn) do (turn: Turn):
|
||||
during(turn, ds, HttpContext.grabType) do (ctx: HttpContext):
|
||||
let peer = ctx.res.unembed(Cap).get
|
||||
var client = newHttpClient()
|
||||
try:
|
||||
var
|
||||
headers = newHttpHeaders()
|
||||
contentType = ""
|
||||
for key, val in ctx.req.headers:
|
||||
if key == Symbol"content-type":
|
||||
contentType = val
|
||||
client.headers[key.string] = val
|
||||
let stdRes = client.request(
|
||||
ctx.req.url,
|
||||
ctx.req.method.string.toUpper,
|
||||
ctx.req.body.toContent(contentType), headers
|
||||
)
|
||||
client.headers["content-type"] = contentType
|
||||
var resp = HttpResponse(orKind: HttpResponseKind.status)
|
||||
resp.status.code = stdRes.status[0 .. 2].parseInt
|
||||
resp.status.message = stdRes.status[3 .. ^1]
|
||||
message(turn, peer, resp)
|
||||
resp = HttpResponse(orKind: HttpResponseKind.header)
|
||||
for key, val in stdRes.headers:
|
||||
if key == "Content-Type":
|
||||
contentType = val
|
||||
resp.header.name = key.Symbol
|
||||
resp.header.value = val
|
||||
message(turn, peer, resp)
|
||||
case contentType
|
||||
of "application/json", "text/preserves":
|
||||
message(turn, peer,
|
||||
initRecord("done", stdRes.bodyStream.readAll.parsePreserves))
|
||||
of "application/preserves":
|
||||
message(turn, peer,
|
||||
initRecord("done", stdRes.bodyStream.decodePreserves))
|
||||
else:
|
||||
resp = HttpResponse(orKind: HttpResponseKind.done)
|
||||
resp.done.chunk.string = stdRes.bodyStream.readAll()
|
||||
message(turn, peer, resp)
|
||||
except CatchableError as err:
|
||||
var resp = HttpResponse(orKind: HttpResponseKind.status)
|
||||
resp.status.code = 400
|
||||
resp.status.message = "Internal client error"
|
||||
message(turn, peer, resp)
|
||||
resp = HttpResponse(orKind: HttpResponseKind.done)
|
||||
resp.done.chunk.string = err.msg
|
||||
message(turn, peer, resp)
|
||||
client.close()
|
||||
do:
|
||||
client.close()
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
spawnHttpClient(turn, ds)
|
|
@ -0,0 +1 @@
|
|||
define:ssl
|
|
@ -0,0 +1,44 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
from os import commandLineParams
|
||||
import preserves, syndicate/capabilities, syndicate/protocols/sturdy
|
||||
|
||||
const usage = """
|
||||
mintsturdyref OID < SECRET_KEY
|
||||
|
||||
Mint Sturdyrefs using a sixteen-byte secret key read from stdin using OIDs
|
||||
passed as command-line parameters.
|
||||
|
||||
Example:
|
||||
mintsturdyref '"syndicate"' < /dev/null
|
||||
|
||||
See:
|
||||
https://synit.org/book/operation/builtin/gatekeeper.html#sturdyrefs
|
||||
https://synit.org/book/glossary.html?highlight=oid#oid
|
||||
"""
|
||||
|
||||
proc main =
|
||||
var oids: seq[Value]
|
||||
for p in commandLineParams():
|
||||
case p
|
||||
of "-h", "--help", "?":
|
||||
quit(usage)
|
||||
else:
|
||||
add(oids, parsePreserves p)
|
||||
if oids.len == 0:
|
||||
stderr.writeLine """using the "syndicate" OID"""
|
||||
oids.add(toPreserves "syndicate")
|
||||
|
||||
var key: array[16, byte]
|
||||
case readBytes(stdin, key, 0, 16)
|
||||
of 16: discard
|
||||
of 0: stderr.writeLine "using null key"
|
||||
else: quit "expected sixteen bytes of key from stdin"
|
||||
|
||||
for oid in oids:
|
||||
let sturdy = mint(key, oid)
|
||||
doAssert validate(key, sturdy)
|
||||
stdout.writeLine(sturdy)
|
||||
|
||||
main()
|
|
@ -0,0 +1,53 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
## An actor for Linux file-system mounting.
|
||||
|
||||
when not defined(linux):
|
||||
{.error: "this component only tested for Linux".}
|
||||
|
||||
import std/oserrors
|
||||
import preserves, preserves/sugar
|
||||
import syndicate
|
||||
import ./schema/mountpoints
|
||||
|
||||
type BootArgs {.preservesDictionary.} = object
|
||||
dataspace: Cap
|
||||
|
||||
proc mount(source, target, fsType: cstring; flags: culong; data: pointer): cint {.importc, header: "<sys/mount.h>".}
|
||||
## `mount(2)`
|
||||
|
||||
proc umount(target: cstring): cint {.importc, header: "<sys/mount.h>".}
|
||||
## `umount(2)`
|
||||
|
||||
proc spawnMountActor*(turn: Turn; ds: Cap): Actor {.discardable.} =
|
||||
spawnActor(turn, "mount_actor") do (turn: Turn):
|
||||
let
|
||||
targetPat = observePattern(!Mountpoint, { @[%1]: grabLit() })
|
||||
sourcePat = observePattern(!Mountpoint, {
|
||||
@[%0]: grabLit(),
|
||||
@[%2]: grabLit(),
|
||||
})
|
||||
during(turn, ds, ?:BootArgs) do (ds: Cap):
|
||||
during(turn, ds, targetPat) do (target: string):
|
||||
during(turn, ds, sourcePat) do (source: string, fsType: string):
|
||||
var mountpoint = Mountpoint(
|
||||
source: source,
|
||||
target: target,
|
||||
`type`: fsType,
|
||||
)
|
||||
var rc = mount(source, target, fsType, 0, nil)
|
||||
if rc == 0:
|
||||
mountpoint.status = Status(orKind: StatusKind.success)
|
||||
else:
|
||||
mountpoint.status = Status(orKind: StatusKind.Failure)
|
||||
mountpoint.status.failure.msg = osErrorMsg(osLastError())
|
||||
discard publish(turn, ds, mountpoint)
|
||||
do:
|
||||
discard umount(target)
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
discard spawnMountActor(turn, ds)
|
|
@ -0,0 +1,20 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[sequtils, os, strutils]
|
||||
import preserves, syndicate, syndicate/relays
|
||||
|
||||
runActor("msg") do (turn: Turn):
|
||||
let
|
||||
data = map(commandLineParams(), parsePreserves)
|
||||
cmd = paramStr(0).extractFilename.normalize
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
case cmd
|
||||
of "assert":
|
||||
for e in data:
|
||||
publish(turn, ds, e)
|
||||
else: # "msg"
|
||||
for e in data:
|
||||
message(turn, ds, e)
|
||||
sync(turn, ds) do (turn: Turn):
|
||||
stopActor(turn)
|
|
@ -0,0 +1,163 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import
|
||||
pkg/preserves,
|
||||
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
|
||||
./schema/[config, sql]
|
||||
|
||||
{.passL: "-lpq".}
|
||||
|
||||
{.pragma: libpq, header: "libpq-fe.h", importc.}
|
||||
|
||||
type
|
||||
Oid = cuint
|
||||
PGconn {.libpq.} = ptr object
|
||||
PGresult {.libpq.} = ptr object
|
||||
ConnStatusType {.libpq.} = enum
|
||||
CONNECTION_OK, CONNECTION_BAD, ## Non-blocking mode only below here
|
||||
##
|
||||
## The existence of these should never be relied upon - they should only
|
||||
## be used for user feedback or similar purposes.
|
||||
##
|
||||
CONNECTION_STARTED, ## Waiting for connection to be made.
|
||||
CONNECTION_MADE, ## Connection OK; waiting to send.
|
||||
CONNECTION_AWAITING_RESPONSE, ## Waiting for a response from the
|
||||
## postmaster.
|
||||
CONNECTION_AUTH_OK, ## Received authentication; waiting for
|
||||
## backend startup.
|
||||
CONNECTION_SETENV, ## This state is no longer used.
|
||||
CONNECTION_SSL_STARTUP, ## Negotiating SSL.
|
||||
CONNECTION_NEEDED, ## Internal state: connect() needed
|
||||
CONNECTION_CHECK_WRITABLE, ## Checking if session is read-write.
|
||||
CONNECTION_CONSUME, ## Consuming any extra messages.
|
||||
CONNECTION_GSS_STARTUP, ## Negotiating GSSAPI.
|
||||
CONNECTION_CHECK_TARGET, ## Checking target server properties.
|
||||
CONNECTION_CHECK_STANDBY ## Checking if server is in standby mode.
|
||||
|
||||
ExecStatusType = enum
|
||||
PGRES_EMPTY_QUERY = 0, ## empty query string was executed
|
||||
PGRES_COMMAND_OK, ## a query command that doesn't return
|
||||
## anything was executed properly by the
|
||||
## backend
|
||||
PGRES_TUPLES_OK, ## a query command that returns tuples was
|
||||
## executed properly by the backend, PGresult
|
||||
## contains the result tuples
|
||||
PGRES_COPY_OUT, ## Copy Out data transfer in progress
|
||||
PGRES_COPY_IN, ## Copy In data transfer in progress
|
||||
PGRES_BAD_RESPONSE, ## an unexpected response was recv'd from the
|
||||
## backend
|
||||
PGRES_NONFATAL_ERROR, ## notice or warning message
|
||||
PGRES_FATAL_ERROR, ## query failed
|
||||
PGRES_COPY_BOTH, ## Copy In/Out data transfer in progress
|
||||
PGRES_SINGLE_TUPLE, ## single tuple from larger resultset
|
||||
PGRES_PIPELINE_SYNC, ## pipeline synchronization point
|
||||
PGRES_PIPELINE_ABORTED ## Command didn't run because of an abort
|
||||
## earlier in a pipeline
|
||||
|
||||
proc PQconnectdbParams(
|
||||
keywords: cstringArray; values: cstringArray; expand_dbname: cint): PGconn {.libpq.}
|
||||
|
||||
proc PQerrorMessage(conn: PGconn): cstring {.libpq.}
|
||||
proc PQfinish(conn: PGconn) {.libpq.}
|
||||
proc PQstatus(conn: PGconn): ConnStatusType {.libpq.}
|
||||
proc PQexec(conn: PGconn; query: cstring): PGresult {.libpq.}
|
||||
proc PQresultStatus(res: PGresult): ExecStatusType {.libpq.}
|
||||
proc PQresStatus (status: ExecStatusType): cstring {.libpq.}
|
||||
proc PQresultErrorMessage(res: PGresult): cstring {.libpq.}
|
||||
proc PQclear(res: PGresult) {.libpq.}
|
||||
proc PQntuples(res: PGresult): cint {.libpq.}
|
||||
proc PQnfields(res: PGresult): cint {.libpq.}
|
||||
proc PQgetvalue(res: PGresult; tup_num: cint; field_num: cint): cstring {.libpq.}
|
||||
proc PQftype(res: PGresult; field_num: cint): Oid {.libpq.}
|
||||
proc PQfsize(res: PGresult; field_num: cint): cint {.libpq.}
|
||||
|
||||
# proc PQsocket(conn: PGconn): cint
|
||||
# proc PQconnectStartParams(
|
||||
# keywords: cstringArray; values: cstringArray; expand_dbname: cint): PGconn
|
||||
# TODO: async
|
||||
|
||||
proc checkPointer(p: pointer) =
|
||||
if p.isNil: raise newException(OutOfMemDefect, "Postgres returned nil")
|
||||
|
||||
type StringPairs = seq[tuple[key: string, val: string]]
|
||||
|
||||
proc splitParams(params: StringPairs): (cstringArray, cstringArray) =
|
||||
var strings = newSeq[string](params.len)
|
||||
for i, _ in params: strings[i] = params[i][0]
|
||||
result[0] = allocCStringArray(strings)
|
||||
for i, _ in params: strings[i] = params[i][1]
|
||||
result[1] = allocCStringArray(strings)
|
||||
|
||||
proc renderSql(tokens: openarray[Value]): string =
|
||||
for token in tokens:
|
||||
if result.len > 0: result.add ' '
|
||||
case token.kind
|
||||
of pkSymbol:
|
||||
result.add token.symbol.string
|
||||
of pkString:
|
||||
result.add '\''
|
||||
result.add token.string
|
||||
result.add '\''
|
||||
of pkFloat, pkRegister, pkBigInt:
|
||||
result.add $token
|
||||
of pkBoolean:
|
||||
if token.bool: result.add '1'
|
||||
else: result.add '0'
|
||||
else:
|
||||
return ""
|
||||
|
||||
proc spawnPostgreActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
|
||||
result = spawnActor(turn, "postgre") do (turn: Turn):
|
||||
let pat = Resolve?:{ 0: PostgreStep.grabTypeFlat, 1: grab() }
|
||||
during(turn, relay, pat) do (params: StringPairs, observer: Cap):
|
||||
linkActor(turn, "postgre-conn") do (turn: Turn):
|
||||
var
|
||||
(keys, vals) = splitParams(params)
|
||||
conn = PQconnectdbParams(keys, vals, 0)
|
||||
checkPointer(conn)
|
||||
let
|
||||
status = PQstatus(conn)
|
||||
msg = $PQerrorMessage(conn)
|
||||
deallocCStringArray(keys)
|
||||
deallocCStringArray(vals)
|
||||
onStop(turn) do (turn: Turn):
|
||||
PQfinish(conn)
|
||||
if status == CONNECTION_OK:
|
||||
let ds = turn.newDataspace()
|
||||
discard publish(turn, ds, initRecord("status", toSymbol($status), msg.toPreserves))
|
||||
during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
|
||||
var text = renderSql statement
|
||||
if text == "":
|
||||
discard publish(turn, ds, SqlError(msg: "invalid statement", context: $statement))
|
||||
else:
|
||||
var
|
||||
res = PQexec(conn, text)
|
||||
st = PQresultStatus(res)
|
||||
if st == PGRES_TUPLES_OK or st == PGRES_SINGLE_TUPLE:
|
||||
let tuples = PQntuples(res)
|
||||
let fields = PQnfields(res)
|
||||
if tuples > 0 and fields > 0:
|
||||
for r in 0..<tuples:
|
||||
var tupl = initSequence(fields)
|
||||
for f in 0..<fields:
|
||||
tupl[f] = toPreserves($PQgetvalue(res, r, f))
|
||||
discard publish(turn, target, tupl)
|
||||
else:
|
||||
discard publish(turn, ds, SqlError(
|
||||
msg: $PQresStatus(st),
|
||||
context: $PQresultErrorMessage(res),
|
||||
))
|
||||
PQclear(res)
|
||||
discard publish(turn, observer,
|
||||
ResolvedAccepted(responderSession: ds))
|
||||
else:
|
||||
discard publish(turn, observer,
|
||||
Rejected(detail: msg.toPreserves))
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
|
||||
spawnPostgreActor(turn, relay)
|
|
@ -0,0 +1,21 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[tables, os]
|
||||
import preserves
|
||||
|
||||
type ProcessInfo {.preservesDictionary.} = object
|
||||
program: string
|
||||
argv: seq[string]
|
||||
env: Table[string, string]
|
||||
dir: string
|
||||
|
||||
proc main =
|
||||
var info: ProcessInfo
|
||||
info.program = getAppFilename()
|
||||
info.argv = commandLineParams()
|
||||
for key, val in envPairs(): info.env[key] = val
|
||||
info.dir = getCurrentDir()
|
||||
writeLine(stdout, info.toPreserves)
|
||||
|
||||
main()
|
|
@ -0,0 +1,11 @@
|
|||
const
|
||||
ESC* = "\x1b"
|
||||
InitializePrinter* = ESC & "@"
|
||||
CancelLine* = ESC & "\x18"
|
||||
SelectBoldFont* = ESC & "E"
|
||||
CancelBoldFont* = ESC & "F"
|
||||
SelectItalicFont* = ESC & "4"
|
||||
CanceItalicFont* = ESC & "5"
|
||||
SelectSuperScript* = ESC & "S0"
|
||||
SelectSubScript* = ESC & "S1"
|
||||
CancelAltScript* = ESC & "T"
|
|
@ -1,37 +0,0 @@
|
|||
|
||||
import
|
||||
std/typetraits, preserves
|
||||
|
||||
type
|
||||
XdgOpen* {.preservesRecord: "xdg-open".} = object
|
||||
`uris`*: seq[string]
|
||||
|
||||
UriRunnerConfigKind* {.pure.} = enum
|
||||
`ListenOn`, `ActionHandler`
|
||||
`UriRunnerConfig`*[E] {.preservesOr.} = ref object
|
||||
case orKind*: UriRunnerConfigKind
|
||||
of UriRunnerConfigKind.`ListenOn`:
|
||||
`listenon`*: ListenOn[E]
|
||||
|
||||
of UriRunnerConfigKind.`ActionHandler`:
|
||||
`actionhandler`*: ActionHandler
|
||||
|
||||
|
||||
ListenOn*[E] {.preservesRecord: "listen-on".} = ref object
|
||||
`dataspace`*: Preserve[E]
|
||||
|
||||
ActionHandler* {.preservesRecord: "action-handler".} = object
|
||||
`pat`*: string
|
||||
`cmd`*: seq[string]
|
||||
|
||||
proc `$`*[E](x: UriRunnerConfig[E] | ListenOn[E]): string =
|
||||
`$`(toPreserve(x, E))
|
||||
|
||||
proc encode*[E](x: UriRunnerConfig[E] | ListenOn[E]): seq[byte] =
|
||||
encode(toPreserve(x, E))
|
||||
|
||||
proc `$`*(x: XdgOpen | ActionHandler): string =
|
||||
`$`(toPreserve(x))
|
||||
|
||||
proc encode*(x: XdgOpen | ActionHandler): seq[byte] =
|
||||
encode(toPreserve(x))
|
|
@ -0,0 +1,31 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
## See the rofi-script(5) manpage for documentation.
|
||||
|
||||
import std/[cmdline, envvars, strutils, tables]
|
||||
import preserves, syndicate, syndicate/relays
|
||||
import ./schema/rofi
|
||||
|
||||
if getEnv("ROFI_OUTSIDE") == "":
|
||||
quit("run this program in rofi")
|
||||
|
||||
runActor("rofi_script_actor") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
case paramCount()
|
||||
of 0:
|
||||
let pat = ?:Options
|
||||
onPublish(turn, ds, pat) do (options: seq[string]):
|
||||
stdout.writeLine options.join("\n")
|
||||
quit()
|
||||
|
||||
of 1:
|
||||
var select = Select(option: commandLineParams()[0])
|
||||
for (key, val) in envPairs():
|
||||
if key.startsWith "ROFI_":
|
||||
select.environment[Symbol key] = val
|
||||
message(turn, ds, select)
|
||||
sync(turn, ds, stopActor)
|
||||
|
||||
else:
|
||||
quit("rofi passed an unexpected number of arguments")
|
|
@ -0,0 +1,2 @@
|
|||
include_rules
|
||||
: foreach ../../*.prs |> !preserves-schema-nim |> %B.nim | $(PROJECT_DIR)/<schema>
|
|
@ -0,0 +1,31 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
XsltItems* = seq[XsltItem]
|
||||
Pulse* {.preservesRecord: "pulse".} = object
|
||||
`periodSec`*: float
|
||||
`proxy`* {.preservesEmbedded.}: Value
|
||||
|
||||
XsltItem* = string
|
||||
XmlTranslation* {.preservesRecord: "xml-translation".} = object
|
||||
`xml`*: string
|
||||
`pr`*: Value
|
||||
|
||||
FileSystemUsage* {.preservesRecord: "file-system-usage".} = object
|
||||
`path`*: string
|
||||
`size`*: BiggestInt
|
||||
|
||||
XsltTransform* {.preservesRecord: "xslt-transform".} = object
|
||||
`stylesheet`*: string
|
||||
`input`*: string
|
||||
`output`*: Value
|
||||
|
||||
proc `$`*(x: XsltItems | Pulse | XsltItem | XmlTranslation | FileSystemUsage |
|
||||
XsltTransform): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: XsltItems | Pulse | XsltItem | XmlTranslation | FileSystemUsage |
|
||||
XsltTransform): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,19 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
Base64File* {.preservesRecord: "base64-file".} = object
|
||||
`txt`*: string
|
||||
`path`*: string
|
||||
`size`*: BiggestInt
|
||||
|
||||
Base64Text* {.preservesRecord: "base64".} = object
|
||||
`txt`*: string
|
||||
`bin`*: seq[byte]
|
||||
|
||||
proc `$`*(x: Base64File | Base64Text): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Base64File | Base64Text): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,144 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
HttpClientArgumentsField0* {.preservesDictionary.} = object
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
HttpClientArguments* {.preservesRecord: "http-client".} = object
|
||||
`field0`*: HttpClientArgumentsField0
|
||||
|
||||
JsonTranslatorArgumentsField0* {.preservesDictionary.} = object
|
||||
`argv`*: seq[string]
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
JsonTranslatorArguments* {.preservesRecord: "json-stdio-translator".} = object
|
||||
`field0`*: JsonTranslatorArgumentsField0
|
||||
|
||||
SocketAddressKind* {.pure.} = enum
|
||||
`TcpAddress`, `UnixAddress`
|
||||
`SocketAddress`* {.preservesOr.} = object
|
||||
case orKind*: SocketAddressKind
|
||||
of SocketAddressKind.`TcpAddress`:
|
||||
`tcpaddress`*: TcpAddress
|
||||
|
||||
of SocketAddressKind.`UnixAddress`:
|
||||
`unixaddress`*: UnixAddress
|
||||
|
||||
|
||||
Base64DecoderArgumentsField0* {.preservesDictionary.} = object
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
Base64DecoderArguments* {.preservesRecord: "base64-decoder".} = object
|
||||
`field0`*: Base64DecoderArgumentsField0
|
||||
|
||||
SqliteStepField0* {.preservesDictionary.} = object
|
||||
`database`*: string
|
||||
|
||||
SqliteStep* {.preservesRecord: "sqlite".} = object
|
||||
`field0`*: SqliteStepField0
|
||||
|
||||
XsltArgumentsField0* {.preservesDictionary.} = object
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
XsltArguments* {.preservesRecord: "xslt".} = object
|
||||
`field0`*: XsltArgumentsField0
|
||||
|
||||
JsonSocketTranslatorStepField0* {.preservesDictionary.} = object
|
||||
`socket`*: SocketAddress
|
||||
|
||||
JsonSocketTranslatorStep* {.preservesRecord: "json-socket-translator".} = object
|
||||
`field0`*: JsonSocketTranslatorStepField0
|
||||
|
||||
FileSystemUsageArgumentsField0* {.preservesDictionary.} = object
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
FileSystemUsageArguments* {.preservesRecord: "file-system-usage".} = object
|
||||
`field0`*: FileSystemUsageArgumentsField0
|
||||
|
||||
HttpDriverStepField0* {.preservesDictionary.} = object
|
||||
|
||||
HttpDriverStep* {.preservesRecord: "http-driver".} = object
|
||||
`field0`*: HttpDriverStepField0
|
||||
|
||||
PostgreStepField0* {.preservesDictionary.} = object
|
||||
`connection`*: seq[PostgreConnectionParameter]
|
||||
|
||||
PostgreStep* {.preservesRecord: "postgre".} = object
|
||||
`field0`*: PostgreStepField0
|
||||
|
||||
TcpAddress* {.preservesRecord: "tcp".} = object
|
||||
`host`*: string
|
||||
`port`*: BiggestInt
|
||||
|
||||
CacheArgumentsField0* {.preservesDictionary.} = object
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`lifetime`*: float
|
||||
|
||||
CacheArguments* {.preservesRecord: "cache".} = object
|
||||
`field0`*: CacheArgumentsField0
|
||||
|
||||
XmlTranslatorArgumentsField0* {.preservesDictionary.} = object
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
XmlTranslatorArguments* {.preservesRecord: "xml-translator".} = object
|
||||
`field0`*: XmlTranslatorArgumentsField0
|
||||
|
||||
PostgreConnectionParameter* {.preservesTuple.} = object
|
||||
`key`*: string
|
||||
`val`*: string
|
||||
|
||||
PulseArgumentsField0* {.preservesDictionary.} = object
|
||||
`dataspace`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
PulseArguments* {.preservesRecord: "pulse".} = object
|
||||
`field0`*: PulseArgumentsField0
|
||||
|
||||
Tcp* {.preservesRecord: "tcp".} = object
|
||||
`host`*: string
|
||||
`port`*: BiggestInt
|
||||
|
||||
UnixAddress* {.preservesRecord: "unix".} = object
|
||||
`path`*: string
|
||||
|
||||
PrinterStepField0* {.preservesDictionary.} = object
|
||||
|
||||
PrinterStep* {.preservesRecord: "printer".} = object
|
||||
`field0`*: PrinterStepField0
|
||||
|
||||
proc `$`*(x: HttpClientArguments | JsonTranslatorArguments | SocketAddress |
|
||||
Base64DecoderArguments |
|
||||
SqliteStep |
|
||||
XsltArguments |
|
||||
JsonSocketTranslatorStep |
|
||||
FileSystemUsageArguments |
|
||||
HttpDriverStep |
|
||||
PostgreStep |
|
||||
TcpAddress |
|
||||
CacheArguments |
|
||||
XmlTranslatorArguments |
|
||||
PostgreConnectionParameter |
|
||||
PulseArguments |
|
||||
Tcp |
|
||||
UnixAddress |
|
||||
PrinterStep): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: HttpClientArguments | JsonTranslatorArguments | SocketAddress |
|
||||
Base64DecoderArguments |
|
||||
SqliteStep |
|
||||
XsltArguments |
|
||||
JsonSocketTranslatorStep |
|
||||
FileSystemUsageArguments |
|
||||
HttpDriverStep |
|
||||
PostgreStep |
|
||||
TcpAddress |
|
||||
CacheArguments |
|
||||
XmlTranslatorArguments |
|
||||
PostgreConnectionParameter |
|
||||
PulseArguments |
|
||||
Tcp |
|
||||
UnixAddress |
|
||||
PrinterStep): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,16 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
InotifyMessage* {.preservesRecord: "inotify".} = object
|
||||
`path`*: string
|
||||
`event`*: Symbol
|
||||
`cookie`*: BiggestInt
|
||||
`name`*: string
|
||||
|
||||
proc `$`*(x: InotifyMessage): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: InotifyMessage): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,14 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/json
|
||||
import preserves, preserves/jsonhooks
|
||||
|
||||
export fromPreservesHook, toPreservesHook
|
||||
# re-export the hooks so that conversion "just works"
|
||||
|
||||
type
|
||||
SendJson* {.preservesRecord: "send".} = object
|
||||
data*: JsonNode
|
||||
RecvJson* {.preservesRecord: "recv".} = object
|
||||
data*: JsonNode
|
|
@ -0,0 +1,30 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
Failure* {.preservesRecord: "failure".} = object
|
||||
`msg`*: string
|
||||
|
||||
Mountpoint* {.preservesRecord: "mount".} = object
|
||||
`source`*: string
|
||||
`target`*: string
|
||||
`type`*: string
|
||||
`status`*: Status
|
||||
|
||||
StatusKind* {.pure.} = enum
|
||||
`Failure`, `success`
|
||||
`Status`* {.preservesOr.} = object
|
||||
case orKind*: StatusKind
|
||||
of StatusKind.`Failure`:
|
||||
`failure`*: Failure
|
||||
|
||||
of StatusKind.`success`:
|
||||
`success`* {.preservesLiteral: "#t".}: bool
|
||||
|
||||
|
||||
proc `$`*(x: Failure | Mountpoint | Status): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Failure | Mountpoint | Status): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,16 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
RoundTripTime* {.preservesRecord: "rtt".} = object
|
||||
`address`*: string
|
||||
`minimum`*: float
|
||||
`average`*: float
|
||||
`maximum`*: float
|
||||
|
||||
proc `$`*(x: RoundTripTime): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: RoundTripTime): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,18 @@
|
|||
|
||||
import
|
||||
preserves, std/tables
|
||||
|
||||
type
|
||||
Environment* = Table[Symbol, string]
|
||||
Select* {.preservesRecord: "rofi-select".} = object
|
||||
`option`*: string
|
||||
`environment`*: Environment
|
||||
|
||||
Options* {.preservesRecord: "rofi-options".} = object
|
||||
`options`*: seq[string]
|
||||
|
||||
proc `$`*(x: Environment | Select | Options): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Environment | Select | Options): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,18 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
Query* {.preservesRecord: "query".} = object
|
||||
`statement`*: seq[Value]
|
||||
`target`* {.preservesEmbedded.}: Value
|
||||
|
||||
SqlError* {.preservesRecord: "sql-error".} = object
|
||||
`msg`*: string
|
||||
`context`*: string
|
||||
|
||||
proc `$`*(x: Query | SqlError): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Query | SqlError): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,156 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import
|
||||
pkg/preserves,
|
||||
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
|
||||
./schema/[config, sql]
|
||||
|
||||
# Avoid Sqlite3 from the standard library because it is
|
||||
# only held together by wishful thinking and dlload.
|
||||
|
||||
{.passC: staticExec("pkg-config --cflags sqlite3").}
|
||||
{.passL: staticExec("pkg-config --libs sqlite3").}
|
||||
|
||||
{.pragma: sqlite3h, header: "sqlite3.h".}
|
||||
|
||||
var
|
||||
SQLITE_VERSION_NUMBER {.importc, sqlite3h.}: cint
|
||||
SQLITE_OK {.importc, sqlite3h.}: cint
|
||||
SQLITE_ROW {.importc, sqlite3h.}: cint
|
||||
SQLITE_DONE {.importc, sqlite3h.}: cint
|
||||
SQLITE_OPEN_READONLY {.importc, sqlite3h.}: cint
|
||||
|
||||
const
|
||||
SQLITE_INTEGER = 1
|
||||
SQLITE_FLOAT = 2
|
||||
SQLITE_TEXT = 3
|
||||
SQLITE_BLOB = 4
|
||||
# SQLITE_NULL = 5
|
||||
|
||||
type
|
||||
Sqlite3 {.importc: "sqlite3", sqlite3h.} = distinct pointer
|
||||
Stmt {.importc: "sqlite3_stmt", sqlite3h.} = distinct pointer
|
||||
|
||||
{.pragma: importSqlite3, importc: "sqlite3_$1", sqlite3h.}
|
||||
|
||||
proc libversion_number: cint {.importSqlite3.}
|
||||
|
||||
proc open_v2(filename: cstring; ppDb: ptr Sqlite3; flags: cint; zVfs: cstring): cint {.importSqlite3.}
|
||||
proc close(ds: Sqlite3): int32 {.discardable, importSqlite3.}
|
||||
|
||||
proc errmsg(db: Sqlite3): cstring {.importSqlite3.}
|
||||
|
||||
proc prepare_v2(db: Sqlite3; zSql: cstring, nByte: cint; ppStmt: ptr Stmt; pzTail: ptr cstring): cint {.importSqlite3.}
|
||||
|
||||
proc step(para1: Stmt): cint {.importSqlite3.}
|
||||
|
||||
proc column_count(stmt: Stmt): int32 {.importSqlite3.}
|
||||
proc column_blob(stmt: Stmt; col: cint): pointer {.importSqlite3.}
|
||||
proc column_bytes(stmt: Stmt; col: cint): cint {.importSqlite3.}
|
||||
proc column_double(stmt: Stmt; col: cint): float64 {.importSqlite3.}
|
||||
proc column_int64(stmt: Stmt; col: cint): int64 {.importSqlite3.}
|
||||
proc column_text(stmt: Stmt; col: cint): cstring {.importSqlite3.}
|
||||
proc column_type(stmt: Stmt; col: cint): cint {.importSqlite3.}
|
||||
proc finalize(stmt: Stmt): cint {.importSqlite3.}
|
||||
|
||||
doAssert libversion_number() == SQLITE_VERSION_NUMBER
|
||||
|
||||
proc assertError(facet: Facet; cap: Cap; db: Sqlite3; context: string) =
|
||||
run(facet) do (turn: Turn):
|
||||
publish(turn, cap, SqlError(
|
||||
msg: $errmsg(db),
|
||||
context: context,
|
||||
))
|
||||
|
||||
proc assertError(facet: Facet; cap: Cap; msg, context: string) =
|
||||
run(facet) do (turn: Turn):
|
||||
publish(turn, cap, SqlError(
|
||||
msg: msg,
|
||||
context: context,
|
||||
))
|
||||
|
||||
proc extractValue(stmt: Stmt; col: cint): Value =
|
||||
case column_type(stmt, col)
|
||||
of SQLITE_INTEGER:
|
||||
result = toPreserves(column_int64(stmt, col))
|
||||
of SQLITE_FLOAT:
|
||||
result = toPreserves(column_double(stmt, col))
|
||||
of SQLITE_TEXT:
|
||||
result = Value(kind: pkString, string: newString(column_bytes(stmt, col)))
|
||||
if result.string.len > 0:
|
||||
copyMem(addr result.string[0], column_text(stmt, col), result.string.len)
|
||||
of SQLITE_BLOB:
|
||||
result = Value(kind: pkByteString, bytes: newSeq[byte](column_bytes(stmt, col)))
|
||||
if result.bytes.len > 0:
|
||||
copyMem(addr result.bytes[0], column_blob(stmt, col), result.bytes.len)
|
||||
else:
|
||||
result = initRecord("null")
|
||||
|
||||
proc extractTuple(stmt: Stmt; arity: cint): Value =
|
||||
result = initSequence(arity)
|
||||
for col in 0..<arity: result[col] = extractValue(stmt, col)
|
||||
|
||||
proc renderSql(tokens: openarray[Value]): string =
|
||||
for token in tokens:
|
||||
if result.len > 0: result.add ' '
|
||||
case token.kind
|
||||
of pkSymbol:
|
||||
result.add token.symbol.string
|
||||
of pkString:
|
||||
result.add '\''
|
||||
result.add token.string
|
||||
result.add '\''
|
||||
of pkFloat, pkRegister, pkBigInt:
|
||||
result.add $token
|
||||
of pkBoolean:
|
||||
if token.bool: result.add '1'
|
||||
else: result.add '0'
|
||||
else:
|
||||
return ""
|
||||
|
||||
proc spawnSqliteActor*(turn: Turn; relay: Cap): Actor {.discardable.} =
|
||||
result = spawnActor(turn, "sqlite") do (turn: Turn):
|
||||
let pat = Resolve?:{ 0: SqliteStep.grabTypeFlat, 1: grab() }
|
||||
during(turn, relay, pat) do (path: string, observer: Cap):
|
||||
linkActor(turn, path) do (turn: Turn):
|
||||
let facet = turn.facet
|
||||
stderr.writeLine("opening SQLite database ", path)
|
||||
var db: Sqlite3
|
||||
if open_v2(path, addr db, SQLITE_OPEN_READONLY, nil) != SQLITE_OK:
|
||||
discard publish(turn, observer,
|
||||
Rejected(detail: toPreserves($errmsg(db))))
|
||||
else:
|
||||
turn.onStop do (turn: Turn):
|
||||
close(db)
|
||||
stderr.writeLine("closed SQLite database ", path)
|
||||
let ds = turn.newDataspace()
|
||||
discard publish(turn, observer,
|
||||
ResolvedAccepted(responderSession: ds))
|
||||
during(turn, ds, ?:Query) do (statement: seq[Value], target: Cap):
|
||||
var
|
||||
stmt: Stmt
|
||||
text = renderSql statement
|
||||
if text == "":
|
||||
assertError(facet, target, "invalid statement", $statement)
|
||||
elif prepare_v2(db, text, text.len.cint, addr stmt, nil) != SQLITE_OK:
|
||||
assertError(facet, target, db, text)
|
||||
else:
|
||||
try:
|
||||
let arity = column_count(stmt)
|
||||
var res = step(stmt)
|
||||
while res == SQLITE_ROW:
|
||||
var rec = extractTuple(stmt, arity)
|
||||
discard publish(turn, target, rec)
|
||||
res = step(stmt)
|
||||
assert res != 100
|
||||
if res != SQLITE_DONE:
|
||||
assertError(facet, target, db, text)
|
||||
finally:
|
||||
if finalize(stmt) != SQLITE_OK: assertError(facet, target, db, text)
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
|
||||
spawnSqliteActor(turn, relay)
|
|
@ -0,0 +1,28 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
## Syndicate multitool.
|
||||
|
||||
import syndicate, syndicate/relays, syndicate/drivers/timers
|
||||
|
||||
import ./syndesizer/[
|
||||
base64_decoder,
|
||||
cache_actor,
|
||||
file_system_usage,
|
||||
http_driver,
|
||||
json_socket_translator,
|
||||
json_translator,
|
||||
pulses,
|
||||
xml_translator]
|
||||
|
||||
runActor("syndesizer") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
|
||||
discard spawnTimerDriver(turn, relay)
|
||||
discard spawnBase64Decoder(turn, relay)
|
||||
discard spawnCacheActor(turn, relay)
|
||||
discard spawnFileSystemUsageActor(turn, relay)
|
||||
discard spawnHttpDriver(turn, relay)
|
||||
discard spawnJsonSocketTranslator(turn, relay)
|
||||
discard spawnJsonStdioTranslator(turn, relay)
|
||||
discard spawnPulseActor(turn, relay)
|
||||
discard spawnXmlTranslator(turn, relay)
|
|
@ -0,0 +1,3 @@
|
|||
include_rules
|
||||
: foreach *.nim |> !nim_bin |> {bin}
|
||||
: foreach {bin} |> !assert_built |>
|
|
@ -0,0 +1,50 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import
|
||||
std/[base64, os],
|
||||
pkg/nimcrypto/blake2,
|
||||
preserves, preserves/sugar, syndicate,
|
||||
../schema/config,
|
||||
../schema/base64 as schema
|
||||
|
||||
export Base64DecoderArguments
|
||||
export schema
|
||||
|
||||
proc spawnBase64Decoder*(turn: Turn; root: Cap): Actor {.discardable.} =
|
||||
spawnActor(turn, "base64-decoder") do (turn: Turn):
|
||||
let tmpDir = getTempDir()
|
||||
during(turn, root, ?:Base64DecoderArguments) do (ds: Cap):
|
||||
|
||||
let decTextPat = observePattern(!Base64Text, { @[%0]: grabLit() })
|
||||
during(turn, ds, decTextPat) do (txt: string):
|
||||
discard publish(turn, ds, Base64Text(
|
||||
txt: txt,
|
||||
bin: cast[seq[byte]](decode(txt)),
|
||||
))
|
||||
|
||||
let encTextPat = observePattern(!Base64Text, { @[%1]: grabLit() })
|
||||
during(turn, ds, encTextPat) do (bin: seq[byte]):
|
||||
discard publish(turn, ds, Base64Text(
|
||||
txt: encode(bin),
|
||||
bin: bin,
|
||||
))
|
||||
|
||||
let decFilePat = observePattern( !Base64File, { @[%0]: grabLit() })
|
||||
during(turn, ds, decFilePat) do (txt: string):
|
||||
var
|
||||
bin = decode(txt)
|
||||
digest = $blake2_256.digest(bin)
|
||||
path = tmpDir / digest
|
||||
writeFile(path, bin)
|
||||
discard publish(turn, ds, Base64File(
|
||||
txt: txt,
|
||||
path: path,
|
||||
size: bin.len,
|
||||
))
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
spawnBase64Decoder(turn, ds)
|
|
@ -0,0 +1,58 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/times
|
||||
import preserves, syndicate,
|
||||
syndicate/durings,
|
||||
syndicate/drivers/timers
|
||||
|
||||
import ../schema/config
|
||||
|
||||
proc afterTimeout(n: float64): LaterThan =
|
||||
## Get a `LaterThan` record for `n` seconds in the future.
|
||||
result.seconds = getTime().toUnixFloat() + n
|
||||
|
||||
type CacheEntity {.final.} = ref object of Entity
|
||||
timeouts, target: Cap
|
||||
# dataspaces for observing timeouts and publishing values
|
||||
pattern: Pattern
|
||||
lifetime: float64
|
||||
|
||||
method publish(cache: CacheEntity; turn: Turn; ass: AssertionRef; h: Handle) =
|
||||
## Re-assert pattern captures in a sub-facet.
|
||||
discard inFacet(turn) do (turn: Turn):
|
||||
# TODO: a seperate facet for every assertion, too much?
|
||||
var ass = depattern(cache.pattern, ass.value.sequence)
|
||||
# Build an assertion with what he have of the pattern and capture.
|
||||
discard publish(turn, cache.target, ass)
|
||||
let timeout = afterTimeout(cache.lifetime)
|
||||
onPublish(turn, cache.timeouts, ?timeout) do:
|
||||
stop(turn) # end this facet
|
||||
|
||||
proc isObserve(pat: Pattern): bool =
|
||||
pat.orKind == PatternKind.group and
|
||||
pat.group.type.orKind == GroupTypeKind.rec and
|
||||
pat.group.type.rec.label.isSymbol"Observe"
|
||||
|
||||
proc spawnCacheActor*(turn: Turn; root: Cap): Actor =
|
||||
spawnActor(turn, "cache_actor") do (turn: Turn):
|
||||
during(turn, root, ?:CacheArguments) do (ds: Cap, lifetime: float64):
|
||||
onPublish(turn, ds, ?:Observe) do (pat: Pattern, obs: Cap):
|
||||
var cache: CacheEntity
|
||||
if obs.relay != turn.facet and not(pat.isObserve):
|
||||
# Watch pattern if the observer is not us
|
||||
# and if the pattern isn't a recursive observe
|
||||
cache = CacheEntity(
|
||||
timeouts: root,
|
||||
target: ds,
|
||||
pattern: pat,
|
||||
lifetime: lifetime,
|
||||
)
|
||||
discard observe(turn, ds, pat, cache)
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
discard spawnTimerDriver(turn, ds)
|
||||
discard spawnCacheActor(turn, ds)
|
|
@ -0,0 +1,28 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[dirs, os, paths]
|
||||
import preserves, preserves/sugar
|
||||
import syndicate
|
||||
|
||||
import ../schema/[assertions, config]
|
||||
|
||||
proc spawnFileSystemUsageActor*(turn: Turn; root: Cap): Actor {.discardable.} =
|
||||
spawn("file-system-usage", turn) do (turn: Turn):
|
||||
during(turn, root, ?:FileSystemUsageArguments) do (ds: Cap):
|
||||
let pat = observePattern(!FileSystemUsage, { @[%0]: grab() })
|
||||
during(turn, ds, pat) do (lit: Literal[string]):
|
||||
var ass = FileSystemUsage(path: lit.value)
|
||||
if fileExists(ass.path): ass.size = getFileSize(ass.path)
|
||||
else:
|
||||
for fp in walkDirRec(paths.Path(lit.value), yieldFilter={pcFile}):
|
||||
var fs = getFileSize(string fp)
|
||||
inc(ass.size, fs)
|
||||
discard publish(turn, ds, ass)
|
||||
# TODO: updates?
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
discard spawnFileSystemUsageActor(turn, ds)
|
|
@ -0,0 +1,53 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
## Thin wrapper over `syndicate/drivers/http_driver`.
|
||||
|
||||
import
|
||||
pkg/taps,
|
||||
pkg/preserves,
|
||||
pkg/syndicate,
|
||||
pkg/syndicate/drivers/http_driver,
|
||||
pkg/syndicate/protocols/[gatekeeper, sturdy],
|
||||
../schema/config
|
||||
|
||||
proc spawnHttpDriver*(turn: Turn; relay: Cap): Actor {.discardable.} =
|
||||
## Create a dataspace for the driver and to the gatekeeper dance.
|
||||
spawnActor(turn, "http-driver") do (turn: Turn):
|
||||
let pat = Resolve?:{ 0: HttpDriverStep.dropType }
|
||||
during(turn, relay, pat):
|
||||
let ds = turn.newDataspace()
|
||||
http_driver.spawnHttpDriver(turn, ds)
|
||||
# Spawn a shared driver.
|
||||
let pat = Resolve?:{ 0: HttpDriverStep.dropType, 1: grab() }
|
||||
during(turn, relay, pat) do (obs: Cap):
|
||||
discard publish(turn, obs, ResolvedAccepted(responderSession: ds))
|
||||
# Pass the shared driver dataspace.
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
|
||||
when defined(solo5):
|
||||
import solo5
|
||||
acquireDevices([("eth0", netBasic)], netAcquireHook)
|
||||
|
||||
proc envRoute: Route =
|
||||
var pr = parsePreserves $solo5_start_info.cmdline
|
||||
if result.fromPreserves pr:
|
||||
return
|
||||
elif pr.isSequence:
|
||||
for e in pr:
|
||||
if result.fromPreserves e:
|
||||
return
|
||||
quit("failed to parse command line for route to Syndicate gatekeeper")
|
||||
|
||||
runActor("main") do (turn: Turn):
|
||||
let relay = newDataspace(turn)
|
||||
spawnRelays(turn, relay)
|
||||
resolve(turn, relay, envRoute(), spawnHttpDriver)
|
||||
|
||||
else:
|
||||
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
|
||||
spawnHttpDriver(turn, relay)
|
|
@ -0,0 +1,2 @@
|
|||
define:ipv6Enabled
|
||||
include:"std/assertions"
|
|
@ -0,0 +1,82 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import
|
||||
std/[json, options],
|
||||
pkg/sys/[ioqueue, sockets],
|
||||
pkg/preserves, pkg/preserves/jsonhooks,
|
||||
pkg/syndicate, pkg/syndicate/protocols/[gatekeeper, sturdy],
|
||||
../schema/[config, json_messages]
|
||||
|
||||
template translateSocketBody {.dirty.} =
|
||||
# Template workaround for CPS and parameterized types.
|
||||
var
|
||||
guard = initGuard(facet)
|
||||
dec = newBufferedDecoder(0)
|
||||
buf = new string #TODO: get a pointer into the decoder
|
||||
alive = true
|
||||
proc kill(turn: Turn) =
|
||||
alive = false
|
||||
proc setup(turn: Turn) =
|
||||
# Closure, not CPS.
|
||||
onMessage(turn, ds, ?:SendJson) do (data: JsonNode):
|
||||
if alive:
|
||||
discard trampoline:
|
||||
whelp write(socket[], $data & "\n")
|
||||
else:
|
||||
stderr.writeLine "dropped send of ", data
|
||||
discard publish(turn, observer, ResolvedAccepted(responderSession: ds))
|
||||
# Resolve the <json-socket-translator { }> step.
|
||||
onStop(facet, kill)
|
||||
run(facet, setup)
|
||||
while alive:
|
||||
# TODO: parse buffer
|
||||
buf[].setLen(0x4000)
|
||||
let n = read(socket[], buf)
|
||||
if n < 1:
|
||||
stderr.writeLine "socket read returned ", n
|
||||
else:
|
||||
buf[].setLen(n)
|
||||
dec.feed(buf[])
|
||||
var data = dec.parse()
|
||||
if data.isSome:
|
||||
proc send(turn: Turn) =
|
||||
# Closure, not CPS.
|
||||
message(turn, ds, initRecord("recv", data.get))
|
||||
run(facet, send)
|
||||
stderr.writeLine "close socket ", sa
|
||||
close(socket[])
|
||||
|
||||
proc translateSocket(facet: Facet; sa: TcpAddress; ds, observer: Cap) {.asyncio.} =
|
||||
var
|
||||
socket = new AsyncConn[Protocol.Tcp]
|
||||
conn = connectTcpAsync(sa.host, Port sa.port)
|
||||
socket[] = conn
|
||||
translateSocketBody()
|
||||
|
||||
proc translateSocket(facet: Facet; sa: UnixAddress; ds, observer: Cap) {.asyncio.} =
|
||||
var
|
||||
socket = new AsyncConn[Protocol.Unix]
|
||||
conn = connectUnixAsync(sa.path)
|
||||
socket[] = conn
|
||||
translateSocketBody()
|
||||
|
||||
proc spawnJsonSocketTranslator*(turn: Turn; relay: Cap): Actor {.discardable.} =
|
||||
let pat = Resolve?:{ 0: JsonSocketTranslatorStep.grabTypeFlat, 1: grab() }
|
||||
spawnActor(turn, "json-socket-translator") do (turn: Turn):
|
||||
during(turn, relay, pat) do (sa: TcpAddress, observer: Cap):
|
||||
linkActor(turn, "json-socket-translator") do (turn: Turn):
|
||||
let ds = turn.newDataspace()
|
||||
discard trampoline:
|
||||
whelp translateSocket(turn.facet, sa, ds, observer)
|
||||
during(turn, relay, pat) do (sa: UnixAddress, observer: Cap):
|
||||
linkActor(turn, "json-socket-translator") do (turn: Turn):
|
||||
let ds = turn.newDataspace()
|
||||
discard trampoline:
|
||||
whelp translateSocket(turn.facet, sa, ds, observer)
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; relay: Cap):
|
||||
spawnJsonSocketTranslator(turn, relay)
|
|
@ -0,0 +1,33 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[json, osproc]
|
||||
import preserves
|
||||
import syndicate
|
||||
|
||||
import ../schema/[config, json_messages]
|
||||
|
||||
proc runChild(params: seq[string]): string =
|
||||
if params.len < 1:
|
||||
stderr.writeLine "not enough parameters"
|
||||
let
|
||||
cmd = params[0]
|
||||
args = params[1..params.high]
|
||||
try: result = execProcess(command=cmd, args=args, options={poUsePath})
|
||||
except CatchableError as err:
|
||||
quit("execProcess failed: " & err.msg)
|
||||
if result == "":
|
||||
stderr.writeLine "no ouput"
|
||||
|
||||
proc spawnJsonStdioTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
|
||||
spawnActor(turn, "json-stdio-translator") do (turn: Turn):
|
||||
during(turn, root, ?:JsonTranslatorArguments) do (argv: seq[string], ds: Cap):
|
||||
var js = parseJson(runChild(argv))
|
||||
message(turn, ds, RecvJson(data: js))
|
||||
discard publish(turn, ds, RecvJson(data: js))
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
spawnJsonStdioTranslator(turn, ds)
|
|
@ -0,0 +1,105 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[options, tables, times]
|
||||
import preserves, preserves/sugar
|
||||
import syndicate, syndicate/drivers/timers
|
||||
|
||||
import ../schema/[assertions, config]
|
||||
|
||||
type PulseEntity {.final.} = ref object of Entity
|
||||
## An entity that asserts and retracts observers on a pulse.
|
||||
self, timers: Cap
|
||||
target: Entity
|
||||
period: float
|
||||
timerHandle: Handle
|
||||
observers: Table[Handle, AssertionRef]
|
||||
observePattern: Pattern
|
||||
observing: bool
|
||||
|
||||
proc schedule(turn: Turn; pulse: PulseEntity) =
|
||||
## Schedule the next pulse.
|
||||
## The next pulse will be schedule using the current time as
|
||||
## reference point and not the moment of the previous pulse.
|
||||
let then = getTime().toUnixFloat()+pulse.period
|
||||
pulse.timerHandle = publish(turn, pulse.timers, Observe(
|
||||
pattern: LaterThan ?: { 0: ?then },
|
||||
observer: pulse.self,
|
||||
))
|
||||
|
||||
method publish(pulse: PulseEntity; turn: Turn; ass: AssertionRef; h: Handle) =
|
||||
## Publish observers in reponse to <later-than …> assertions.
|
||||
pulse.timers.target.retract(turn, pulse.timerHandle)
|
||||
schedule(turn, pulse)
|
||||
pulse.observing = true
|
||||
for h, a in pulse.observers.pairs:
|
||||
pulse.target.publish(turn, a, h)
|
||||
pulse.target.sync(turn, pulse.self)
|
||||
|
||||
method message(pulse: PulseEntity; turn: Turn; v: AssertionRef) =
|
||||
## Retract observers in response to a sync message.
|
||||
pulse.observing = false
|
||||
for h in pulse.observers.keys:
|
||||
pulse.target.retract(turn, h)
|
||||
|
||||
type ProxyEntity {.final.} = ref object of Entity
|
||||
## A proxy `Entity` that diverts observers to a `PulseEntity`.
|
||||
pulse: PulseEntity
|
||||
|
||||
method publish(proxy: ProxyEntity; turn: Turn; ass: AssertionRef; h: Handle) =
|
||||
## Proxy assertions that are not observations.
|
||||
if proxy.pulse.observePattern.matches ass.value:
|
||||
if proxy.pulse.observers.len == 0:
|
||||
schedule(turn, proxy.pulse)
|
||||
proxy.pulse.observers[h] = ass
|
||||
else:
|
||||
proxy.pulse.target.publish(turn, ass, h)
|
||||
|
||||
method retract(proxy: ProxyEntity; turn: Turn; h: Handle) =
|
||||
## Retract proxied assertions.
|
||||
var obs: AssertionRef
|
||||
if proxy.pulse.observers.pop(h, obs):
|
||||
if proxy.pulse.observing:
|
||||
proxy.pulse.target.retract(turn, h)
|
||||
if proxy.pulse.observers.len == 0:
|
||||
proxy.pulse.timers.target.retract(turn, proxy.pulse.timerHandle)
|
||||
else:
|
||||
proxy.pulse.target.retract(turn, h)
|
||||
|
||||
method message(proxy: ProxyEntity; turn: Turn; v: AssertionRef) =
|
||||
## Proxy mesages.
|
||||
proxy.pulse.target.message(turn, v)
|
||||
|
||||
method sync(proxy: ProxyEntity; turn: Turn; peer: Cap) =
|
||||
## Proxy sync.
|
||||
proxy.pulse.target.sync(turn, peer)
|
||||
|
||||
proc newProxyEntity(turn: Turn; timers, ds: Cap; period: float): ProxyEntity =
|
||||
new result
|
||||
result.pulse = PulseEntity(
|
||||
target: ds.target,
|
||||
timers: timers,
|
||||
observePattern: ?:Observe,
|
||||
period: period,
|
||||
)
|
||||
result.pulse.self = newCap(turn, result.pulse)
|
||||
|
||||
proc spawnPulseActor*(turn: Turn; root: Cap): Actor =
|
||||
## Spawn an actor that retracts and re-asserts observers on
|
||||
## a timed pulse. Requires a timer service on the `root` capability.
|
||||
spawnActor(turn, "pulse") do (turn: Turn):
|
||||
let grabPeriod = observePattern(!Pulse, { @[%0]: grab() })
|
||||
during(turn, root, ?:PulseArguments) do (ds: Cap):
|
||||
during(turn, ds, grabPeriod) do (lit: Literal[float]):
|
||||
if lit.value < 0.000_1:
|
||||
stderr.writeLine("pulse period is too small: ", lit.value, "s")
|
||||
else:
|
||||
let proxyCap = newCap(turn, newProxyEntity(turn, root, ds, lit.value))
|
||||
var pulse = Pulse(periodSec: lit.value, proxy: embed proxyCap)
|
||||
discard publish(turn, ds, pulse)
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
discard spawnPulseActor(turn, ds)
|
|
@ -0,0 +1,34 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[options, parsexml, xmlparser, xmltree]
|
||||
import preserves, preserves/sugar, preserves/xmlhooks
|
||||
import syndicate
|
||||
|
||||
import ../schema/[assertions, config]
|
||||
|
||||
proc translateXml(s: string): XmlTranslation =
|
||||
result.xml = s
|
||||
try: result.pr = result.xml.parseXml({allowUnquotedAttribs}).toPreservesHook
|
||||
except XmlError: discard
|
||||
|
||||
proc translatePreserves(pr: Value): XmlTranslation {.gcsafe.} =
|
||||
result.pr = pr
|
||||
var xn = result.pr.preservesTo(XmlNode)
|
||||
if xn.isSome: result.xml = $get(xn)
|
||||
|
||||
proc spawnXmlTranslator*(turn: Turn; root: Cap): Actor {.discardable.} =
|
||||
spawnActor(turn, "xml-translator") do (turn: Turn):
|
||||
during(turn, root, ?:XmlTranslatorArguments) do (ds: Cap):
|
||||
let xmlPat = observePattern(!XmlTranslation, {@[%0]:grab()})
|
||||
during(turn, ds, xmlPat) do (xs: Literal[string]):
|
||||
publish(turn, ds, translateXml(xs.value))
|
||||
let prPat = observePattern(!XmlTranslation, {@[%1]:grab()})
|
||||
during(turn, ds, prPat) do (pr: Literal[Value]):
|
||||
publish(turn, ds, translatePreserves(pr.value))
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
spawnXmlTranslator(turn, ds)
|
|
@ -0,0 +1,64 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[os, tables]
|
||||
import preserves, syndicate, syndicate/[durings, relays]
|
||||
|
||||
proc parsePattern(pr: Value): Pattern =
|
||||
let
|
||||
dropSigil = initRecord("lit", "_".toSymbol)
|
||||
grabSigil = initRecord("lit", "?".toSymbol)
|
||||
var pr = drop(pr).toPreserves
|
||||
apply(pr) do (pr: var Value):
|
||||
if pr == dropSigil:
|
||||
pr = initRecord("_")
|
||||
elif pr == grabSigil:
|
||||
pr = initRecord("bind", initRecord("_"))
|
||||
doAssert result.fromPreserves(pr)
|
||||
|
||||
proc inputPatterns: seq[Pattern] =
|
||||
var args = commandLineParams()
|
||||
result.setLen(args.len)
|
||||
for i, input in args:
|
||||
try: result[i] = input.parsePreserves.parsePattern
|
||||
except ValueError:
|
||||
quit "failed to parse Preserves argument"
|
||||
|
||||
type DumpEntity {.final.} = ref object of Entity
|
||||
assertions: Table[Handle, seq[Value]]
|
||||
|
||||
proc toLine(values: seq[Value]; prefix: char): string =
|
||||
result = newStringOfCap(1024)
|
||||
let sep = getEnv("FS", " ")
|
||||
result.add(prefix)
|
||||
for v in values:
|
||||
add(result, sep)
|
||||
add(result, $v)
|
||||
add(result, '\n')
|
||||
|
||||
method publish(dump: DumpEntity; turn: Turn; ass: AssertionRef; h: Handle) =
|
||||
var values = ass.value.sequence
|
||||
stdout.write(values.toLine('+'))
|
||||
stdout.flushFile()
|
||||
dump.assertions[h] = values
|
||||
|
||||
method retract(dump: DumpEntity; turn: Turn; h: Handle) =
|
||||
var values: seq[Value]
|
||||
if dump.assertions.pop(h, values):
|
||||
stdout.write(values.toLine('-'))
|
||||
stdout.flushFile()
|
||||
|
||||
method message*(dump: DumpEntity; turn: Turn; ass: AssertionRef) =
|
||||
stdout.write(ass.value.sequence.toLine('!'))
|
||||
stdout.flushFile()
|
||||
|
||||
proc main =
|
||||
let
|
||||
patterns = inputPatterns()
|
||||
entity = DumpEntity()
|
||||
runActor("syndex_card") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; peer: Cap):
|
||||
for pat in patterns:
|
||||
discard observe(turn, peer, pat, entity)
|
||||
|
||||
main()
|
|
@ -1,54 +0,0 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[asyncdispatch, re]
|
||||
import preserves, syndicate
|
||||
import ./protocol, ./exec
|
||||
|
||||
bootDataspace("main") do (root: Ref; turn: var Turn):
|
||||
var actions: seq[tuple[regex: Regex; cmd: string; args: seq[Assertion]]]
|
||||
|
||||
connectStdio(root, turn)
|
||||
|
||||
onPublish(turn, root, ?ActionHandler) do (pat: string; cmd: seq[Assertion]):
|
||||
if cmd.len < 2:
|
||||
stderr.writeLine "ignoring ", $cmd, " for ", pat
|
||||
else:
|
||||
if cmd[0].isString:
|
||||
var act = (re(pat, {reIgnoreCase, reStudy}), cmd[0].string, cmd[1..cmd.high],)
|
||||
actions.add act
|
||||
else:
|
||||
stderr.writeLine "not a valid program specification: ", cmd[0]
|
||||
|
||||
during(turn, root, ?ListenOn[Ref]) do (a: Assertion):
|
||||
let ds = unembed a
|
||||
onMessage(turn, ds, ?XdgOpen) do (uris: seq[string]):
|
||||
for uri in uris:
|
||||
var matched: bool
|
||||
for act in actions:
|
||||
if match(uri, act.regex):
|
||||
matched = true
|
||||
var args = newSeq[string](act.args.len)
|
||||
for i, arg in act.args:
|
||||
if arg.isString:
|
||||
args[i] = replacef(uri, act.regex, arg.string)
|
||||
elif arg.isInteger:
|
||||
if arg.int == 0:
|
||||
args[i] = uri
|
||||
else:
|
||||
args[i] = replacef(uri, act.regex, "$" & $arg.int)
|
||||
message(turn, root, Exec(
|
||||
argv: CommandLine(
|
||||
orKind: CommandLineKind.full,
|
||||
full: FullCommandLine(
|
||||
program: act.cmd,
|
||||
args: args)),
|
||||
restartPolicy: RestartPolicy.never))
|
||||
if not matched:
|
||||
stderr.writeLine "no actions matched for ", uri
|
||||
do:
|
||||
# The Syndicate server retracts all assertions when
|
||||
# the config is rewritten.
|
||||
actions.setLen 0
|
||||
|
||||
runForever()
|
|
@ -1,28 +0,0 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[asyncdispatch, os]
|
||||
from std/sequtils import map
|
||||
import syndicate, syndicate/capabilities
|
||||
import ./protocol
|
||||
|
||||
proc unixSocketPath: string =
|
||||
result = getEnv("SYNDICATE_SOCK")
|
||||
if result == "":
|
||||
result = getEnv("XDG_RUNTIME_DIR", "/run/user/1000") / "dataspace"
|
||||
|
||||
proc mintCap: SturdyRef =
|
||||
var key: array[16, byte]
|
||||
mint(key, "syndicate")
|
||||
|
||||
bootDataspace("main") do (root: Ref; turn: var Turn):
|
||||
connectUnix(turn, unixSocketPath(), mintCap()) do (turn: var Turn; ds: Ref):
|
||||
var uris = commandLineParams().map do (param: string) -> string:
|
||||
if fileExists param:
|
||||
"file://" & absolutePath(param)
|
||||
else:
|
||||
param
|
||||
message(turn, ds, XdgOpen(uris: uris))
|
||||
|
||||
for i in 0..7: poll(20)
|
||||
# A hack to exit
|
|
@ -0,0 +1,211 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[os, strutils]
|
||||
import preserves, preserves/sugar, syndicate
|
||||
import ./schema/[assertions, config]
|
||||
|
||||
{.passC: staticExec("pkg-config --cflags libxslt").}
|
||||
{.passL: staticExec("pkg-config --libs libxslt").}
|
||||
|
||||
{.pragma: libxslt, header: "libxslt/xslt.h", importc.}
|
||||
|
||||
type
|
||||
xmlElementType {.libxslt.} = enum
|
||||
XML_ELEMENT_NODE = 1,
|
||||
XML_ATTRIBUTE_NODE = 2,
|
||||
XML_TEXT_NODE = 3,
|
||||
XML_CDATA_SECTION_NODE = 4,
|
||||
XML_ENTITY_REF_NODE = 5,
|
||||
XML_ENTITY_NODE = 6,
|
||||
XML_PI_NODE = 7,
|
||||
XML_COMMENT_NODE = 8,
|
||||
XML_DOCUMENT_NODE = 9,
|
||||
XML_DOCUMENT_TYPE_NODE = 10,
|
||||
XML_DOCUMENT_FRAG_NODE = 11,
|
||||
XML_NOTATION_NODE = 12,
|
||||
XML_HTML_DOCUMENT_NODE = 13,
|
||||
XML_DTD_NODE = 14,
|
||||
XML_ELEMENT_DECL = 15,
|
||||
XML_ATTRIBUTE_DECL = 16,
|
||||
XML_ENTITY_DECL = 17,
|
||||
XML_NAMESPACE_DECL = 18,
|
||||
XML_XINCLUDE_START = 19,
|
||||
XML_XINCLUDE_END = 20
|
||||
|
||||
xmlNsPtr = ptr xmlNs
|
||||
xmlNs {.libxslt.} = object
|
||||
next: xmlNsPtr
|
||||
href, prefix: cstring
|
||||
|
||||
xmlAttrPtr = ptr xmlAttr
|
||||
xmlAttr {.libxslt.} = object
|
||||
name: cstring
|
||||
next: xmlAttrPtr
|
||||
children: xmlNodePtr
|
||||
|
||||
xmlElementContentPtr = ptr xmlElementContent
|
||||
xmlElementContent {.libxslt.} = object
|
||||
encoding: cstring
|
||||
|
||||
xmlNodePtr = ptr xmlNode
|
||||
xmlNode {.libxslt.} = object
|
||||
`type`: xmlElementType
|
||||
name: cstring
|
||||
children, next: xmlNodePtr
|
||||
content: cstring
|
||||
properties: xmlAttrPtr
|
||||
nsDef: xmlNsPtr
|
||||
|
||||
xmlDocPtr {.libxslt.} = distinct pointer
|
||||
xsltStylesheetPtr {.libxslt.} = distinct pointer
|
||||
|
||||
proc isNil(x: xmlDocPtr): bool {.borrow.}
|
||||
proc isNil(x: xsltStylesheetPtr): bool {.borrow.}
|
||||
|
||||
proc xmlReadMemory(buf: pointer; len: cint; url, enc: cstring; opts: cint): xmlDocPtr {.libxslt.}
|
||||
|
||||
proc xmlReadMemory(buf: string; uri = "noname.xml"): xmlDocPtr =
|
||||
xmlReadMemory(buf[0].addr, buf.len.cint, uri, "UTF-8", 0)
|
||||
|
||||
proc xmlParseFile(filename: cstring): xmlDocPtr {.libxslt.}
|
||||
|
||||
proc xmlFreeDoc(p: xmlDocPtr) {.libxslt.}
|
||||
|
||||
proc xmlDocGetRootElement(doc: xmlDocPtr): xmlNodePtr {.libxslt.}
|
||||
|
||||
proc loadXmlDoc(text: string): xmlDocPtr =
|
||||
if text.startsWith("/") and fileExists(text):
|
||||
xmlParseFile(text)
|
||||
else:
|
||||
xmlReadMemory(text, "noname.xml")
|
||||
|
||||
proc xsltParseStylesheetFile(filename: cstring): xsltStylesheetPtr {.libxslt.}
|
||||
|
||||
proc xsltParseStylesheetDoc(doc: xmlDocPtr): xsltStylesheetPtr {.libxslt.}
|
||||
|
||||
proc xsltParseStylesheetDoc(text: string; uri = "noname.xml"): xsltStylesheetPtr =
|
||||
var doc = xmlReadMemory(text, uri)
|
||||
result = xsltParseStylesheetDoc(doc)
|
||||
# implicit free of doc
|
||||
|
||||
proc loadStylesheet(text: string): xsltStylesheetPtr =
|
||||
if text.startsWith("/") and fileExists(text):
|
||||
xsltParseStylesheetFile(text)
|
||||
else:
|
||||
xsltParseStylesheetDoc(text, "noname.xsl")
|
||||
|
||||
proc xsltApplyStylesheet(
|
||||
style: xsltStylesheetPtr, doc: xmlDocPtr, params: cstringArray): xmlDocPtr {.libxslt.}
|
||||
|
||||
proc xsltFreeStylesheet(style: xsltStylesheetPtr) {.libxslt.}
|
||||
|
||||
proc xsltSaveResultToString(txt: ptr pointer; len: ptr cint; res: xmlDocPtr; style: xsltStylesheetPtr): cint {.libxslt.}
|
||||
|
||||
proc c_free*(p: pointer) {.importc: "free", header: "<stdlib.h>".}
|
||||
|
||||
proc xsltSaveResultToString(res: xmlDocPtr; style: xsltStylesheetPtr): string =
|
||||
var
|
||||
txt: pointer
|
||||
len: cint
|
||||
if xsltSaveResultToString(addr txt, addr len, res, style) < 0:
|
||||
raise newException(CatchableError, "xsltSaveResultToString failed")
|
||||
if len > 0:
|
||||
result = newString(int len)
|
||||
copyMem(result[0].addr, txt, len)
|
||||
c_free(txt)
|
||||
|
||||
proc initLibXml =
|
||||
discard
|
||||
|
||||
proc XML_GET_CONTENT(xn: xmlNodePtr): xmlElementContentPtr {.libxslt.}
|
||||
|
||||
proc textContent(xn: xmlNodePtr): string =
|
||||
if xn.content != nil: result = $xn.content
|
||||
|
||||
proc content(attr: xmlAttrPtr): string =
|
||||
var child = attr.children
|
||||
while not child.isNil:
|
||||
result.add child.content
|
||||
child = child.next
|
||||
|
||||
proc preserveSiblings(result: var seq[Value]; first: xmlNodePtr) =
|
||||
var xn = first
|
||||
while not xn.isNil:
|
||||
case xn.type
|
||||
of XML_ELEMENT_NODE:
|
||||
var child = Value(kind: pkRecord)
|
||||
if not xn.nsDef.isNil:
|
||||
child.record.add initDictionary()
|
||||
var ns = xn.nsDef
|
||||
while not ns.isNil:
|
||||
if not ns.href.isNil:
|
||||
var key = Value(kind: pkString)
|
||||
if ns.prefix.isNil:
|
||||
key.string = "xmlns"
|
||||
else:
|
||||
key.string = "xmlns:" & $ns.prefix
|
||||
child.record[0][key] = toPreserves($ns.href)
|
||||
ns = ns.next
|
||||
|
||||
if not xn.properties.isNil:
|
||||
if child.record.len < 1:
|
||||
child.record.add initDictionary()
|
||||
var attr = xn.properties
|
||||
while not attr.isNil:
|
||||
var
|
||||
key = toPreserves($attr.name)
|
||||
val = toPreserves(attr.content)
|
||||
child.record[0][key] = val
|
||||
attr = attr.next
|
||||
if not xn.children.isNil:
|
||||
preserveSiblings(child.record, xn.children)
|
||||
child.record.add tosymbol($xn.name)
|
||||
result.add child
|
||||
of XML_TEXT_NODE:
|
||||
result.add textContent(xn).toPreserves
|
||||
else:
|
||||
stderr.writeLine "not an XML_ELEMENT_NODE - ", $xn.type
|
||||
xn = xn.next
|
||||
|
||||
proc toPreservesHook*(xn: xmlNodePtr): Value =
|
||||
var items = newSeqofCap[Value](1)
|
||||
preserveSiblings(items, xn)
|
||||
items[0]
|
||||
|
||||
proc spawnXsltActor*(turn: Turn; root: Cap): Actor {.discardable.} =
|
||||
spawnActor(turn, "xslt") do (turn: Turn):
|
||||
initLibXml()
|
||||
during(turn, root, ?:XsltArguments) do (ds: Cap):
|
||||
let sheetsPat = observePattern(!XsltTransform, {@[%0]: grab(), @[%1]: grab()})
|
||||
during(turn, ds, sheetsPat) do (stylesheet: Literal[string], input: Literal[string]):
|
||||
let cur = loadStylesheet(stylesheet.value)
|
||||
if cur.isNil:
|
||||
stderr.writeLine "failed to parse stylesheet"
|
||||
else:
|
||||
let doc = loadXmlDoc(input.value)
|
||||
if doc.isNil:
|
||||
stderr.writeLine "failed to parse input document"
|
||||
else:
|
||||
let
|
||||
params = allocCStringArray([])
|
||||
res = xsltApplyStylesheet(cur, doc, params)
|
||||
if res.isNil:
|
||||
stderr.writeLine "failed to apply stylesheet transformation"
|
||||
else:
|
||||
let output = xsltSaveResultToString(res, cur)
|
||||
deallocCStringArray(params)
|
||||
publish(turn, ds, XsltTransform(
|
||||
stylesheet: stylesheet.value,
|
||||
input: input.value,
|
||||
output: xmlDocGetRootElement(res).toPreservesHook,
|
||||
))
|
||||
xmlFreeDoc(res)
|
||||
xmlFreeDoc(doc)
|
||||
xsltFreeStylesheet(cur)
|
||||
|
||||
when isMainModule:
|
||||
import syndicate/relays
|
||||
runActor("main") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
spawnXsltActor(turn, ds)
|
|
@ -0,0 +1,61 @@
|
|||
# Emulate Nimble from CycloneDX data at sbom.json.
|
||||
|
||||
import std/json
|
||||
|
||||
proc lookupComponent(sbom: JsonNode; bomRef: string): JsonNode =
|
||||
for c in sbom{"components"}.getElems.items:
|
||||
if c{"bom-ref"}.getStr == bomRef:
|
||||
return c
|
||||
result = newJNull()
|
||||
|
||||
let
|
||||
sbom = (getPkgDir() & "/sbom.json").readFile.parseJson
|
||||
comp = sbom{"metadata", "component"}
|
||||
bomRef = comp{"bom-ref"}.getStr
|
||||
|
||||
version = comp{"version"}.getStr
|
||||
author = comp{"authors"}[0]{"name"}.getStr
|
||||
description = comp{"description"}.getStr
|
||||
license = comp{"licenses"}[0]{"license", "id"}.getStr
|
||||
|
||||
for prop in comp{"properties"}.getElems.items:
|
||||
let (key, val) = (prop{"name"}.getStr, prop{"value"}.getStr)
|
||||
case key
|
||||
of "nim:skipDirs:":
|
||||
add(skipDirs, val)
|
||||
of "nim:skipFiles:":
|
||||
add(skipFiles, val)
|
||||
of "nim:skipExt":
|
||||
add(skipExt, val)
|
||||
of "nim:installDirs":
|
||||
add(installDirs, val)
|
||||
of "nim:installFiles":
|
||||
add(installFiles, val)
|
||||
of "nim:installExt":
|
||||
add(installExt, val)
|
||||
of "nim:binDir":
|
||||
add(binDir, val)
|
||||
of "nim:srcDir":
|
||||
add(srcDir, val)
|
||||
of "nim:backend":
|
||||
add(backend, val)
|
||||
else:
|
||||
if key.startsWith "nim:bin:":
|
||||
namedBin[key[8..key.high]] = val
|
||||
|
||||
for depend in sbom{"dependencies"}.items:
|
||||
if depend{"ref"}.getStr == bomRef:
|
||||
for depRef in depend{"dependsOn"}.items:
|
||||
let dep = sbom.lookupComponent(depRef.getStr)
|
||||
var spec = dep{"name"}.getStr
|
||||
for extRef in dep{"externalReferences"}.elems:
|
||||
if extRef{"type"}.getStr == "vcs":
|
||||
spec = extRef{"url"}.getStr
|
||||
break
|
||||
let ver = dep{"version"}.getStr
|
||||
if ver != "":
|
||||
if ver.allCharsInSet {'0'..'9', '.'}: spec.add " == "
|
||||
else: spec.add '#'
|
||||
spec.add ver
|
||||
requires spec
|
||||
break
|
|
@ -1,50 +0,0 @@
|
|||
; Expose a dataspace over a unix socket
|
||||
let ?root_ds = dataspace
|
||||
<require-service <relay-listener <unix "/run/user/1000/dataspace"> $gatekeeper>>
|
||||
<bind "syndicate" #x"" $root_ds>
|
||||
|
||||
<require-service <daemon uri_runner>>
|
||||
|
||||
<daemon uri_runner {
|
||||
argv: "uri_runner"
|
||||
protocol: text/syndicate
|
||||
}>
|
||||
|
||||
? <service-object <daemon uri_runner> ?cap> [
|
||||
|
||||
; send configuration to uri_runner
|
||||
$cap [
|
||||
<listen-on $root_ds>
|
||||
|
||||
; Here the "0" argument is replaced with the whole URI asserted by xdg-open.
|
||||
<action-handler "http://.*|https://.*|.*html", ["/run/current-system/sw/bin/librewolf" 0]>
|
||||
|
||||
; An argument can be a reference to a capture.
|
||||
<action-handler "(tox:.*)|uri:(tox:.*)", ["/run/current-system/sw/bin/qtox" 1]>
|
||||
|
||||
; An argument can contain a reference to a capture using the $i notation.
|
||||
<action-handler "https://twitter.com/(.*)" ["/run/current-system/sw/bin/librewolf" "https://nitter.net/$1"]>
|
||||
|
||||
<action-handler "gemini://.*|file:///.*.gmi" ["/run/current-system/sw/bin/kristall" 0]>
|
||||
<action-handler ".*\\.avi|.*\\.mkv|.*\\.mp4|.*\\.ogg|.*\\.opus", ["/run/current-system/sw/bin/mpv" 0]>
|
||||
|
||||
; filesystem paths are always prefixed with file://
|
||||
<action-handler "file://(.*.pdf)" ["/run/current-system/sw/bin/mupdf" 1]>
|
||||
]
|
||||
|
||||
; uri_runner sends messages to the server to start handler applications
|
||||
$cap ?? <exec ?argv ?restartPolicy> [
|
||||
let ?id = timestamp
|
||||
let ?facet = facet
|
||||
let ?d = <uri_runner-exec $id $argv>
|
||||
$config <run-service <daemon $d>>
|
||||
$config <daemon $d {
|
||||
argv: $argv,
|
||||
readyOnStart: #f,
|
||||
restart: $restartPolicy,
|
||||
}>
|
||||
$config ? <service-state <daemon $d> complete> [$facet ! stop]
|
||||
$config ? <service-state <daemon $d> failed> [$facet ! stop]
|
||||
]
|
||||
|
||||
]
|
|
@ -1,13 +0,0 @@
|
|||
# Package
|
||||
|
||||
version = "0.4.0"
|
||||
author = "Emery"
|
||||
description = "A better xdg-open"
|
||||
license = "Unlicense"
|
||||
srcDir = "src"
|
||||
bin = @[ "uri_runner", "xdg_open"]
|
||||
|
||||
|
||||
# Dependencies
|
||||
|
||||
requires "nim >= 1.6.4", "syndicate >= 1.3.0"
|
Loading…
Reference in New Issue