Compare commits


No commits in common. "main" and "syndicate-tools-v0.5.0" have entirely different histories.

101 changed files with 2319 additions and 5257 deletions

View File

@ -1,24 +0,0 @@
on:
push:
branches:
- main
jobs:
build:
runs-on: docker
container:
image: git.syndicate-lang.org/syndicate-lang/rust-builder:latest
steps:
- uses: actions/checkout@v3
- run: CROSS_CONTAINER_IN_CONTAINER=true make ci-release
- uses: actions/upload-artifact@v3
with:
name: syndicate-server-x86_64
path: target/dist/x86_64
- uses: actions/upload-artifact@v3
with:
name: syndicate-server-aarch64
path: target/dist/aarch64
- uses: actions/upload-artifact@v3
with:
name: syndicate-server-armv7
path: target/dist/armv7

View File

@ -1,7 +0,0 @@
FROM rust:latest
RUN cargo install cross
# This is necessary for cross to be able to access /var/run/docker.sock
COPY --from=docker:dind /usr/local/bin/docker /usr/local/bin/
RUN curl -fsSL https://deb.nodesource.com/setup_20.x -o nodesource_setup.sh && bash nodesource_setup.sh && rm -f nodesource_setup.sh && apt -y install nodejs && apt clean

View File

@ -1,11 +0,0 @@
#!/bin/sh
#
# You need to have already logged in:
#
# docker login git.syndicate-lang.org
#
# Use a token with read-only access to the user scope; this seems to be sufficient (!)
set -e
docker build -t git.syndicate-lang.org/syndicate-lang/rust-builder .
docker push git.syndicate-lang.org/syndicate-lang/rust-builder

1452
Cargo.lock (generated)

File diff suppressed because it is too large

View File

@ -2,7 +2,6 @@ cargo-features = ["strip"]
 [workspace]
 members = [
-"syndicate-schema-plugin",
 "syndicate",
 "syndicate-macros",
 "syndicate-server",
@ -26,9 +25,3 @@ strip = true
 [profile.bench]
 debug = true
-# [patch.crates-io]
-# # Unfortunately, until [1] is fixed (perhaps via [2]), we have to use a patched proc-macro2.
-# # [1]: https://github.com/dtolnay/proc-macro2/issues/402
-# # [2]: https://github.com/dtolnay/proc-macro2/pull/407
-# proc-macro2 = { git = "https://github.com/tonyg/proc-macro2", branch = "repair_span_start_end" }

View File

@ -1,5 +1,3 @@
-__ignored__ := $(shell ./setup.sh)
 # Use cargo release to manage publication and versions etc.
 #
 # cargo install cargo-release
@ -17,24 +15,22 @@ ws-bump:
 cargo workspaces version \
 --no-global-tag \
 --individual-tag-prefix '%n-v' \
---allow-branch 'main' \
-$(BUMP_ARGS)
+--allow-branch 'main'
 ws-publish:
 cargo workspaces publish \
 --from-git
-PROTOCOLS_BRANCH=main
 pull-protocols:
 git subtree pull -P syndicate/protocols \
 -m 'Merge latest changes from the syndicate-protocols repository' \
 git@git.syndicate-lang.org:syndicate-lang/syndicate-protocols \
-$(PROTOCOLS_BRANCH)
+main
 static: static-x86_64
 static-%:
-CARGO_TARGET_DIR=target/target.$* cross build --target $*-unknown-linux-musl --features vendored-openssl,jemalloc
+cross build --target $*-unknown-linux-musl --features vendored-openssl
 ###########################################################################
@ -56,35 +52,28 @@ static-%:
 x86_64-binary: x86_64-binary-release
 x86_64-binary-release:
-CARGO_TARGET_DIR=target/target.x86_64 cross build --target x86_64-unknown-linux-musl --release --all-targets --features vendored-openssl,jemalloc
+cross build --target x86_64-unknown-linux-musl --release --all-targets --features vendored-openssl
 x86_64-binary-debug:
-CARGO_TARGET_DIR=target/target.x86_64 cross build --target x86_64-unknown-linux-musl --all-targets --features vendored-openssl
+cross build --target x86_64-unknown-linux-musl --all-targets --features vendored-openssl
 armv7-binary: armv7-binary-release
 armv7-binary-release:
-CARGO_TARGET_DIR=target/target.armv7 cross build --target=armv7-unknown-linux-musleabihf --release --all-targets --features vendored-openssl
+cross build --target=armv7-unknown-linux-musleabihf --release --all-targets --features vendored-openssl
 armv7-binary-debug:
-CARGO_TARGET_DIR=target/target.armv7 cross build --target=armv7-unknown-linux-musleabihf --all-targets --features vendored-openssl
+cross build --target=armv7-unknown-linux-musleabihf --all-targets --features vendored-openssl
-# As of 2023-05-12 (and probably earlier!) this is no longer required with current Rust nightlies
-# # Hack to workaround https://github.com/rust-embedded/cross/issues/598
-# HACK_WORKAROUND_ISSUE_598=CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS="-C link-arg=/usr/local/aarch64-linux-musl/lib/libc.a"
+# Hack to workaround https://github.com/rust-embedded/cross/issues/598
+HACK_WORKAROUND_ISSUE_598=CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS="-C link-arg=/usr/local/aarch64-linux-musl/lib/libc.a"
 aarch64-binary: aarch64-binary-release
 aarch64-binary-release:
-CARGO_TARGET_DIR=target/target.aarch64 cross build --target=aarch64-unknown-linux-musl --release --all-targets --features vendored-openssl,jemalloc
+$(HACK_WORKAROUND_ISSUE_598) \
+cross build --target=aarch64-unknown-linux-musl --release --all-targets --features vendored-openssl
 aarch64-binary-debug:
-CARGO_TARGET_DIR=target/target.aarch64 cross build --target=aarch64-unknown-linux-musl --all-targets --features vendored-openssl
+$(HACK_WORKAROUND_ISSUE_598) \
+cross build --target=aarch64-unknown-linux-musl --all-targets --features vendored-openssl
-ci-release: x86_64-binary-release aarch64-binary-release armv7-binary-release
-rm -rf target/dist
-for arch in x86_64 aarch64 armv7; do \
-mkdir -p target/dist/$$arch; \
-cp -a target/target.$$arch/$$arch-unknown-linux-musl*/release/syndicate-macaroon target/dist/$$arch; \
-cp -a target/target.$$arch/$$arch-unknown-linux-musl*/release/syndicate-server target/dist/$$arch; \
-done
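
For orientation, a hedged sketch of how the targets on the main side of this Makefile are typically driven from a checkout (it assumes `cargo install cross` and a working container runtime, which the diff does not show):

    # invoke the cross-compilation targets defined above
    make x86_64-binary-release   # release build of syndicate-server for x86_64-unknown-linux-musl
    make static-aarch64          # one instance of the static-% pattern rule
    make ci-release              # build all three architectures and collect binaries under target/dist/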

View File

@ -23,30 +23,16 @@ A Rust implementation of:
 ## Quickstart
-From docker or podman:
-docker run -it --rm leastfixedpoint/syndicate-server /syndicate-server -p 8001
-Build and run from source:
 git clone https://git.syndicate-lang.org/syndicate-lang/syndicate-rs
 cd syndicate-rs
 cargo build --release
 ./target/release/syndicate-server -p 8001
-If you have [`mold`](https://github.com/rui314/mold) available (`apt install mold`), you may be
-able to get faster linking by creating `.cargo/config.toml` as follows:
-[build]
-rustflags = ["-C", "link-arg=-fuse-ld=mold"]
-Enabling the `jemalloc` feature can get a *substantial* (~20%-50%) improvement in throughput.
 ## Running the examples
-In one window, start the server with a basic configuration:
-./target/release/syndicate-server -c dev-scripts/benchmark-config.pr
+In one window, start the server:
+./target/release/syndicate-server -p 8001
 Then, choose one of the examples below.
@ -84,7 +70,7 @@ about who kicks off the pingpong session.
 You may find better performance by restricting the server to fewer
 cores than you have available. For example, for me, running
-taskset -c 0,1 ./target/release/syndicate-server -c dev-scripts/benchmark-config.pr
-roughly *doubles* throughput for a single producer/consumer pair,
+taskset -c 0,1 ./target/release/syndicate-server -p 8001
+roughly *quadruples* throughput for a single producer/consumer pair,
 on my 48-core AMD CPU.
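
The README text removed in this comparison points at two optional speedups: linking with mold and the jemalloc allocator feature. A minimal sketch of combining them, assuming the jemalloc feature is exposed by the syndicate-server crate as the Makefile's `vendored-openssl,jemalloc` feature lists suggest:

    # faster linking via mold (from the README snippet above)
    apt install mold
    printf '[build]\nrustflags = ["-C", "link-arg=-fuse-ld=mold"]\n' > .cargo/config.toml
    # assumed feature wiring; adjust the package/feature names to the actual crates
    cargo build --release -p syndicate-server --features jemalloc
    taskset -c 0,1 ./target/release/syndicate-server -c dev-scripts/benchmark-config.pr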

View File

@ -1,3 +1,3 @@
 let ?root_ds = dataspace
 <require-service <relay-listener <tcp "0.0.0.0" 9001> $gatekeeper>>
-<bind <ref { oid: "syndicate" key: #x"" }> $root_ds #f>
+<bind "syndicate" #x"" $root_ds>

View File

@ -1,7 +1,2 @@
 #!/bin/sh
-TASKSET='taskset -c 0,1'
-if [ $(uname -s) = 'Darwin' ]
-then
-TASKSET=
-fi
-make -C ../syndicate-server binary && exec $TASKSET ../target/release/syndicate-server -c benchmark-config.pr "$@"
+make -C ../syndicate-server binary && exec taskset -c 0,1 ../target/release/syndicate-server -c benchmark-config.pr "$@"

2
docker/.gitignore (vendored)
View File

@ -1 +1 @@
-syndicate-server.*
+syndicate-server

View File

@ -1,6 +1,4 @@
 FROM busybox
 RUN mkdir /data
-ARG TARGETARCH
-COPY ./syndicate-server.$TARGETARCH /syndicate-server
-EXPOSE 1
-CMD ["/syndicate-server", "-c", "/data", "-p", "1"]
+COPY ./syndicate-server /
+CMD ["/syndicate-server", "-c", "/data"]

View File

@ -1,37 +1,18 @@
-U=leastfixedpoint
-I=syndicate-server
-ARCHITECTURES:=amd64 arm arm64
-SERVERS:=$(patsubst %,syndicate-server.%,$(ARCHITECTURES))
-VERSION=$(shell ./syndicate-server.$(shell ./docker-architecture $$(uname -m)) --version | cut -d' ' -f2)
 all:
-.PHONY: all clean image push push-only
+.PHONY: all clean image push syndicate-server
 clean:
-rm -f syndicate-server.*
--podman images -q $(U)/$(I) | sort -u | xargs podman rmi -f
-image: $(SERVERS)
-for A in $(ARCHITECTURES); do set -x; \
-podman build --platform=linux/$$A \
--t $(U)/$(I):$(VERSION)-$$A \
--t $(U)/$(I):latest-$$A \
-.; \
-done
-rm -f tmp.image
-push: image push-only
-push-only:
-$(patsubst %,podman push $(U)/$(I):$(VERSION)-%;,$(ARCHITECTURES))
-$(patsubst %,podman push $(U)/$(I):latest-%;,$(ARCHITECTURES))
-podman rmi -f $(U)/$(I):$(VERSION) $(U)/$(I):latest
-podman manifest create $(U)/$(I):$(VERSION) $(patsubst %,$(U)/$(I):$(VERSION)-%,$(ARCHITECTURES))
-podman manifest create $(U)/$(I):latest $(patsubst %,$(U)/$(I):latest-%,$(ARCHITECTURES))
-podman manifest push $(U)/$(I):$(VERSION)
-podman manifest push $(U)/$(I):latest
-syndicate-server.%:
-make -C .. $$(./alpine-architecture $*)-binary-release
-cp -a ../target/target.$$(./alpine-architecture $*)/$$(./alpine-architecture $*)-unknown-linux-musl*/release/syndicate-server $@
+rm -f syndicate-server
+docker rmi leastfixedpoint/syndicate-server
+image: syndicate-server
+docker build -t leastfixedpoint/$$(./syndicate-server --version | tr ' ' ':') -t leastfixedpoint/syndicate-server:latest .
+push: image
+docker push leastfixedpoint/$$(./syndicate-server --version | tr ' ' ':')
+docker push leastfixedpoint/syndicate-server:latest
+syndicate-server:
+make -C .. x86_64-binary-release
+cp -a ../target/x86_64-unknown-linux-musl/release/syndicate-server $@
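
A short usage sketch for the multi-architecture image workflow on the main side of this Makefile (assuming podman is installed and you are already logged in to the registry; neither step is shown in the diff):

    # build per-architecture images, then push them together with the multi-arch manifests
    make -C docker image
    make -C docker push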

View File

@ -1,9 +0,0 @@
# Docker images for syndicate-server
Build using podman:
apt install podman
and at least until the dependencies are fixed (?),
apt install uidmap slirp4netns

View File

@ -1,6 +0,0 @@
#!/bin/sh
case $1 in
amd64) echo x86_64;;
arm) echo armv7;;
arm64) echo aarch64;;
esac

View File

@ -1,6 +0,0 @@
#!/bin/sh
case $1 in
x86_64) echo amd64;;
armv7) echo arm;;
aarch64) echo arm64;;
esac

View File

@ -1,9 +0,0 @@
version: "3"
services:
syndicate:
image: leastfixedpoint/syndicate-server
ports:
- "1:1"
volumes:
- "/etc/syndicate:/data"

View File

@ -9,4 +9,3 @@ buildtag() {
 git tag "$(buildtag syndicate/Cargo.toml)"
 git tag "$(buildtag syndicate-macros/Cargo.toml)"
 git tag "$(buildtag syndicate-server/Cargo.toml)"
-git tag "$(buildtag syndicate-tools/Cargo.toml)"

View File

@ -1,152 +0,0 @@
# We will create a TCP listener on port 9222, which speaks unencrypted
# protocol and allows interaction with the default/system gatekeeper, which
# has a single noise binding for introducing encrypted interaction with a
# *second* gatekeeper, which finally allows resolution of references to
# other objects.
# First, build a space where we place bindings for the inner gatekeeper to
# expose.
let ?inner-bindings = dataspace
# Next, start the inner gatekeeper.
<require-service <gatekeeper $inner-bindings>>
? <service-object <gatekeeper $inner-bindings> ?inner-gatekeeper> [
# Expose it via a noise binding at the outer/system gatekeeper.
<bind <noise { key: #[z1w/OLy0wi3Veyk8/D+2182YxcrKpgc8y0ZJEBDrmWs],
secretKey: #[qLkyuJw/K4yobr4XVKExbinDwEx9QTt9PfDWyx14/kg],
service: world }>
$inner-gatekeeper #f>
]
# Now, expose the outer gatekeeper to the world, via TCP. The system
# gatekeeper is a primordial syndicate-server object bound to $gatekeeper.
<require-service <relay-listener <tcp "0.0.0.0" 9222> $gatekeeper>>
# Finally, let's expose some behaviour accessible via the inner gatekeeper.
#
# We will create a service dataspace called $world.
let ?world = dataspace
# Running `syndicate-macaroon mint --oid a-service --phrase hello` yields:
#
# <ref {oid: a-service, sig: #[JTTGQeYCgohMXW/2S2XH8g]}>
#
# That's a root capability for the service. We use the corresponding
# sturdy.SturdyDescriptionDetail to bind it to $world.
#
$inner-bindings += <bind <ref {oid: a-service, key: #"hello"}>
$world #f>
# Now, we can hand out paths to our services involving an initial noise
# step and a subsequent sturdyref/macaroon step.
#
# For example, running `syndicate-macaroon` like this:
#
# syndicate-macaroon mint --oid a-service --phrase hello \
# --caveat '<rewrite <bind <_>> <rec labelled [<lit "alice"> <ref 0>]>>'
#
# generates
#
# <ref {caveats: [<rewrite <bind <_>> <rec labelled [<lit "alice">, <ref 0>]>>],
# oid: a-service,
# sig: #[CXn7+rAoO3Xr6Y6Laap3OA]}>
#
# which is an attenuation of the root capability we bound that wraps all
# assertions and messages in a `<labelled "alice" _>` wrapper.
#
# All together, the `gatekeeper.Route` that Alice would use would be
# something like:
#
# <route [<ws "wss://generic-dataspace.demo.leastfixedpoint.com/">]
# <noise { key: #[z1w/OLy0wi3Veyk8/D+2182YxcrKpgc8y0ZJEBDrmWs],
# service: world }>
# <ref { caveats: [<rewrite <bind <_>> <rec labelled [<lit "alice">, <ref 0>]>>],
# oid: a-service,
# sig: #[CXn7+rAoO3Xr6Y6Laap3OA] }>>
#
# Here's one for "bob":
#
# syndicate-macaroon mint --oid a-service --phrase hello \
# --caveat '<rewrite <bind <_>> <rec labelled [<lit "bob"> <ref 0>]>>'
#
# <ref {caveats: [<rewrite <bind <_>> <rec labelled [<lit "bob">, <ref 0>]>>],
# oid: a-service,
# sig: #[/75BbF77LOiqNcvpzNHf0g]}>
#
# <route [<ws "wss://generic-dataspace.demo.leastfixedpoint.com/">]
# <noise { key: #[z1w/OLy0wi3Veyk8/D+2182YxcrKpgc8y0ZJEBDrmWs],
# service: world }>
# <ref { caveats: [<rewrite <bind <_>> <rec labelled [<lit "bob">, <ref 0>]>>],
# oid: a-service,
# sig: #[/75BbF77LOiqNcvpzNHf0g] }>>
#
# We relay labelled to unlabelled information, enacting a chat protocol
# that enforces usernames.
$world [
# Assertions of presence have the username wiped out and replaced with the label.
? <labelled ?who <Present _>> <Present $who>
# Likewise utterance messages.
?? <labelled ?who <Says _ ?what>> ! <Says $who $what>
# We allow anyone to subscribe to presence and utterances.
? <labelled _ <Observe <rec Present ?p> ?o>> <Observe <rec Present $p> $o>
? <labelled _ <Observe <rec Says ?p> ?o>> <Observe <rec Says $p> $o>
]
# We can also use sturdyref rewrites to directly handle `Says` and
# `Present` values, rather than wrapping with `<labelled ...>` and
# unwrapping using the script fragment just above.
#
# The multiply-quoted patterns in the `Observe` cases start to get unwieldy
# at this point!
#
# For Alice:
#
# syndicate-macaroon mint --oid a-service --phrase hello --caveat '<or [
# <rewrite <rec Present [<_>]> <rec Present [<lit "alice">]>>
# <rewrite <rec Says [<_> <bind String>]> <rec Says [<lit "alice"> <ref 0>]>>
# <rewrite <bind <rec Observe [<rec rec [<lit Present> <_>]> <_>]>> <ref 0>>
# <rewrite <bind <rec Observe [<rec rec [<lit Says> <_>]> <_>]>> <ref 0>>
# ]>'
#
# <ref { oid: a-service sig: #[s918Jk6As8AWJ9rtozOTlg] caveats: [<or [
# <rewrite <rec Present [<_>]> <rec Present [<lit "alice">]>>
# <rewrite <rec Says [<_>, <bind String>]> <rec Says [<lit "alice">, <ref 0>]>>
# <rewrite <bind <rec Observe [<rec rec [<lit Present>, <_>]>, <_>]>> <ref 0>>
# <rewrite <bind <rec Observe [<rec rec [<lit Says>, <_>]>, <_>]>> <ref 0>> ]>]}>
#
# <route [<ws "wss://generic-dataspace.demo.leastfixedpoint.com/">]
# <noise { key: #[z1w/OLy0wi3Veyk8/D+2182YxcrKpgc8y0ZJEBDrmWs],
# service: world }>
# <ref { oid: a-service sig: #[s918Jk6As8AWJ9rtozOTlg] caveats: [<or [
# <rewrite <rec Present [<_>]> <rec Present [<lit "alice">]>>
# <rewrite <rec Says [<_>, <bind String>]> <rec Says [<lit "alice">, <ref 0>]>>
# <rewrite <bind <rec Observe [<rec rec [<lit Present>, <_>]>, <_>]>> <ref 0>>
# <rewrite <bind <rec Observe [<rec rec [<lit Says>, <_>]>, <_>]>> <ref 0>> ]>]}>>
#
# For Bob:
#
# syndicate-macaroon mint --oid a-service --phrase hello --caveat '<or [
# <rewrite <rec Present [<_>]> <rec Present [<lit "bob">]>>
# <rewrite <rec Says [<_> <bind String>]> <rec Says [<lit "bob"> <ref 0>]>>
# <rewrite <bind <rec Observe [<rec rec [<lit Present> <_>]> <_>]>> <ref 0>>
# <rewrite <bind <rec Observe [<rec rec [<lit Says> <_>]> <_>]>> <ref 0>>
# ]>'
#
# <ref { oid: a-service sig: #[QBbV4LrS0i3BG6OyCPJl+A] caveats: [<or [
# <rewrite <rec Present [<_>]> <rec Present [<lit "bob">]>>
# <rewrite <rec Says [<_>, <bind String>]> <rec Says [<lit "bob">, <ref 0>]>>
# <rewrite <bind <rec Observe [<rec rec [<lit Present>, <_>]>, <_>]>> <ref 0>>
# <rewrite <bind <rec Observe [<rec rec [<lit Says>, <_>]>, <_>]>> <ref 0>> ]>]}>
#
# <route [<ws "wss://generic-dataspace.demo.leastfixedpoint.com/">]
# <noise { key: #[z1w/OLy0wi3Veyk8/D+2182YxcrKpgc8y0ZJEBDrmWs],
# service: world }>
# <ref { oid: a-service sig: #[QBbV4LrS0i3BG6OyCPJl+A] caveats: [<or [
# <rewrite <rec Present [<_>]> <rec Present [<lit "bob">]>>
# <rewrite <rec Says [<_>, <bind String>]> <rec Says [<lit "bob">, <ref 0>]>>
# <rewrite <bind <rec Observe [<rec rec [<lit Present>, <_>]>, <_>]>> <ref 0>>
# <rewrite <bind <rec Observe [<rec rec [<lit Says>, <_>]>, <_>]>> <ref 0>> ]>]}>>

View File

@ -1,23 +0,0 @@
#!/bin/sh
set -e
exec 1>&2
failed=
cmp_and_fail() {
if ! cmp "$1" "$2"
then
failed=failed
fi
}
COMMAND=cmp_and_fail
if [ "$1" = "--fix" ];
then
COMMAND=cp
fi
# Ensure that various copies of cross-package data are identical.
${COMMAND} syndicate/protocols/schema-bundle.bin syndicate-schema-plugin/schema-bundle.bin
[ -z "$failed" ]

View File

@ -1,65 +0,0 @@
# We use $root_ds as the httpd space.
let ?root_ds = dataspace
# Supplying $root_ds as the last parameter in this relay-listener enables httpd service.
<require-service <relay-listener <tcp "0.0.0.0" 9001> $gatekeeper $root_ds>>
# Regular gatekeeper stuff works too.
<bind <ref { oid: "syndicate" key: #x"" }> $root_ds #f>
# Create an httpd router monitoring $root_ds for requests and bind requests.
<require-service <http-router $root_ds>>
# Create a static file server. When it gets a request, it ignores the first n (here, 1)
# elements of the path, and takes the remainder as relative to its configured directory (here,
# ".").
#
<require-service <http-static-files "." 1>>
#
# It publishes a service object: requests should be asserted to this.
# The http-bind record establishes this mapping.
#
? <service-object <http-static-files "." 1> ?handler> [
$root_ds += <http-bind #f 9001 get ["files" ...] $handler>
]
# Separately, bind path /d to $index, and respond there.
#
let ?index = dataspace
$root_ds += <http-bind #f 9001 get ["d"] $index>
$index ? <request _ ?k> [
$k ! <status 200 "OK">
$k ! <header content-type "text/html">
$k ! <chunk "<!DOCTYPE html>">
$k ! <done "<html><body>D</body></html>">
]
# Similarly, bind three paths, /d, /e and /t to $index2
# Because /d doubles up, the httpd router gives a warning when it is accessed.
# Accessing /e works fine.
# Accessing /t results in wasted work because of the hijacking listeners below.
#
let ?index2 = dataspace
$root_ds += <http-bind #f 9001 get ["d"] $index2>
$root_ds += <http-bind #f 9001 get ["e"] $index2>
$root_ds += <http-bind #f 9001 get ["t"] $index2>
$index2 ? <request _ ?k> [
$k ! <status 200 "OK">
$k ! <header content-type "text/html">
$k ! <chunk "<!DOCTYPE html>">
$k ! <done "<html><body>D2</body></html>">
]
# These two hijack /t by listening for raw incoming requests the same way the httpd router
# does. They respond quicker and so win the race. The httpd router's responses are lost.
#
$root_ds ? <request <http-request _ _ _ get ["t"] _ _ _> ?k> [
$k ! <status 200 "OK">
$k ! <header content-type "text/html">
$k ! <done "<html><body>T</body></html>">
]
$root_ds ? <request <http-request _ _ _ get ["t"] _ _ _> ?k> [
$k ! <status 200 "OK">
$k ! <header content-type "text/html">
$k ! <done "<html><body>T2</body></html>">
]

View File

@ -1,4 +0,0 @@
#!/bin/sh
set -e
rustup update
cargo +nightly install --path `pwd`/syndicate-server

View File

@ -1,18 +0,0 @@
#!/bin/sh
#
# Set up a git checkout of this repository for local dev use.
exec 2>/dev/tty 1>&2
set -e
[ -d .git ] || exit 0
for fullhook in ./git-hooks/*
do
hook=$(basename "$fullhook")
[ -L .git/hooks/$hook ] || (
echo "Installing $hook hook"
ln -s ../../git-hooks/$hook .git/hooks/$hook
)
done

View File

@ -1,6 +1,6 @@
 [package]
 name = "syndicate-macros"
-version = "0.33.0"
+version = "0.22.0"
 authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"]
 edition = "2018"
@ -13,11 +13,11 @@ license = "Apache-2.0"
 proc-macro = true
 [dependencies]
-syndicate = { path = "../syndicate", version = "0.41.0"}
+syndicate = { path = "../syndicate", version = "0.27.0"}
 proc-macro2 = { version = "^1.0", features = ["span-locations"] }
 quote = "^1.0"
-syn = { version = "^1.0", features = ["extra-traits"] } # for impl Debug for syn::Expr
+syn = "^1.0"
 [dev-dependencies]
 tokio = { version = "1.10", features = ["io-std"] }

View File

@ -1,133 +0,0 @@
use syndicate::actor::*;
use std::env;
use std::sync::Arc;
#[derive(Debug)]
enum Instruction {
SetPeer(Arc<Ref<Instruction>>),
HandleMessage(u64),
}
struct Forwarder {
hop_limit: u64,
supervisor: Arc<Ref<Instruction>>,
peer: Option<Arc<Ref<Instruction>>>,
}
impl Drop for Forwarder {
fn drop(&mut self) {
let r = self.peer.take();
let _ = tokio::spawn(async move {
drop(r);
});
}
}
impl Entity<Instruction> for Forwarder {
fn message(&mut self, turn: &mut Activation, message: Instruction) -> ActorResult {
match message {
Instruction::SetPeer(r) => {
tracing::info!("Setting peer {:?}", r);
self.peer = Some(r);
}
Instruction::HandleMessage(n) => {
let target = if n >= self.hop_limit { &self.supervisor } else { self.peer.as_ref().expect("peer") };
turn.message(target, Instruction::HandleMessage(n + 1));
}
}
Ok(())
}
}
struct Supervisor {
latency_mode: bool,
total_transfers: u64,
remaining_to_receive: u32,
start_time: Option<std::time::Instant>,
}
impl Entity<Instruction> for Supervisor {
fn message(&mut self, turn: &mut Activation, message: Instruction) -> ActorResult {
match message {
Instruction::SetPeer(_) => {
tracing::info!("Start");
self.start_time = Some(std::time::Instant::now());
},
Instruction::HandleMessage(_n) => {
self.remaining_to_receive -= 1;
if self.remaining_to_receive == 0 {
let stop_time = std::time::Instant::now();
let duration = stop_time - self.start_time.unwrap();
tracing::info!("Stop after {:?}; {:?} messages, so {:?} Hz ({} mode)",
duration,
self.total_transfers,
(1000.0 * self.total_transfers as f64) / duration.as_millis() as f64,
if self.latency_mode { "latency" } else { "throughput" });
turn.stop_root();
}
},
}
Ok(())
}
}
#[tokio::main]
async fn main() -> ActorResult {
syndicate::convenient_logging()?;
Actor::top(None, |t| {
let args: Vec<String> = env::args().collect();
let n_actors: u32 = args.get(1).unwrap_or(&"1000000".to_string()).parse()?;
let n_rounds: u32 = args.get(2).unwrap_or(&"200".to_string()).parse()?;
let latency_mode: bool = match args.get(3).unwrap_or(&"throughput".to_string()).as_str() {
"latency" => true,
"throughput" => false,
_other => return Err("Invalid throughput/latency mode".into()),
};
tracing::info!("Will run {:?} actors for {:?} rounds", n_actors, n_rounds);
let total_transfers: u64 = n_actors as u64 * n_rounds as u64;
let (hop_limit, injection_count) = if latency_mode {
(total_transfers, 1)
} else {
(n_rounds as u64, n_actors)
};
let me = t.create(Supervisor {
latency_mode,
total_transfers,
remaining_to_receive: injection_count,
start_time: None,
});
let mut forwarders: Vec<Arc<Ref<Instruction>>> = Vec::new();
for _i in 0 .. n_actors {
if _i % 10000 == 0 { tracing::info!("Actor {:?}", _i); }
forwarders.push(
t.spawn_for_entity(None, true, Box::new(
Forwarder {
hop_limit,
supervisor: me.clone(),
peer: forwarders.last().cloned(),
}))
.0.expect("an entity"));
}
t.message(&forwarders[0], Instruction::SetPeer(forwarders.last().expect("an entity").clone()));
t.later(move |t| {
t.message(&me, Instruction::SetPeer(me.clone()));
t.later(move |t| {
let mut injected: u32 = 0;
for f in forwarders.into_iter() {
if injected >= injection_count {
break;
}
t.message(&f, Instruction::HandleMessage(0));
injected += 1;
}
Ok(())
});
Ok(())
});
Ok(())
}).await??;
Ok(())
}

View File

@ -1,175 +0,0 @@
use std::env;
use std::sync::Arc;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use tokio::sync::mpsc::{unbounded_channel, UnboundedSender};
type Ref<T> = UnboundedSender<Box<T>>;
#[derive(Debug)]
enum Instruction {
SetPeer(Arc<Ref<Instruction>>),
HandleMessage(u64),
}
struct Forwarder {
hop_limit: u64,
supervisor: Arc<Ref<Instruction>>,
peer: Option<Arc<Ref<Instruction>>>,
}
impl Drop for Forwarder {
fn drop(&mut self) {
let r = self.peer.take();
let _ = tokio::spawn(async move {
drop(r);
});
}
}
enum Action { Continue, Stop }
trait Actor<T> {
fn message(&mut self, message: T) -> Action;
}
fn send<T: std::marker::Send + 'static>(ch: &Arc<Ref<T>>, message: T) -> () {
match ch.send(Box::new(message)) {
Ok(()) => (),
Err(v) => panic!("Aiee! Could not send {:?}", v),
}
}
fn spawn<T: std::marker::Send + 'static, R: Actor<T> + std::marker::Send + 'static>(rt: Option<Arc<AtomicU64>>, mut ac: R) -> Arc<Ref<T>> {
let (tx, mut rx) = unbounded_channel::<Box<T>>();
if let Some(ref c) = rt {
c.fetch_add(1, Ordering::SeqCst);
}
tokio::spawn(async move {
loop {
match rx.recv().await {
None => break,
Some(message) => {
match ac.message(*message) {
Action::Continue => continue,
Action::Stop => break,
}
}
}
}
if let Some(c) = rt {
c.fetch_sub(1, Ordering::SeqCst);
}
});
Arc::new(tx)
}
impl Actor<Instruction> for Forwarder {
fn message(&mut self, message: Instruction) -> Action {
match message {
Instruction::SetPeer(r) => {
tracing::info!("Setting peer {:?}", r);
self.peer = Some(r);
}
Instruction::HandleMessage(n) => {
let target = if n >= self.hop_limit { &self.supervisor } else { self.peer.as_ref().expect("peer") };
send(target, Instruction::HandleMessage(n + 1));
}
}
Action::Continue
}
}
struct Supervisor {
latency_mode: bool,
total_transfers: u64,
remaining_to_receive: u32,
start_time: Option<std::time::Instant>,
}
impl Actor<Instruction> for Supervisor {
fn message(&mut self, message: Instruction) -> Action {
match message {
Instruction::SetPeer(_) => {
tracing::info!("Start");
self.start_time = Some(std::time::Instant::now());
},
Instruction::HandleMessage(_n) => {
self.remaining_to_receive -= 1;
if self.remaining_to_receive == 0 {
let stop_time = std::time::Instant::now();
let duration = stop_time - self.start_time.unwrap();
tracing::info!("Stop after {:?}; {:?} messages, so {:?} Hz ({} mode)",
duration,
self.total_transfers,
(1000.0 * self.total_transfers as f64) / duration.as_millis() as f64,
if self.latency_mode { "latency" } else { "throughput" });
return Action::Stop;
}
},
}
Action::Continue
}
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + std::marker::Send + std::marker::Sync>> {
syndicate::convenient_logging()?;
let args: Vec<String> = env::args().collect();
let n_actors: u32 = args.get(1).unwrap_or(&"1000000".to_string()).parse()?;
let n_rounds: u32 = args.get(2).unwrap_or(&"200".to_string()).parse()?;
let latency_mode: bool = match args.get(3).unwrap_or(&"throughput".to_string()).as_str() {
"latency" => true,
"throughput" => false,
_other => return Err("Invalid throughput/latency mode".into()),
};
tracing::info!("Will run {:?} actors for {:?} rounds", n_actors, n_rounds);
let count = Arc::new(AtomicU64::new(0));
let total_transfers: u64 = n_actors as u64 * n_rounds as u64;
let (hop_limit, injection_count) = if latency_mode {
(total_transfers, 1)
} else {
(n_rounds as u64, n_actors)
};
let me = spawn(Some(count.clone()), Supervisor {
latency_mode,
total_transfers,
remaining_to_receive: injection_count,
start_time: None,
});
let mut forwarders: Vec<Arc<Ref<Instruction>>> = Vec::new();
for _i in 0 .. n_actors {
if _i % 10000 == 0 { tracing::info!("Actor {:?}", _i); }
forwarders.push(spawn(None, Forwarder {
hop_limit,
supervisor: me.clone(),
peer: forwarders.last().cloned(),
}));
}
send(&forwarders[0], Instruction::SetPeer(forwarders.last().expect("an entity").clone()));
send(&me, Instruction::SetPeer(me.clone()));
let mut injected: u32 = 0;
for f in forwarders.into_iter() {
if injected >= injection_count {
break;
}
send(&f, Instruction::HandleMessage(0));
injected += 1;
}
loop {
if count.load(Ordering::SeqCst) == 0 {
break;
}
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
}
Ok(())
}
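
Both benchmark programs read the same positional arguments: actor count (default 1000000), rounds (default 200), and a final "latency" or "throughput" switch. A hedged invocation sketch, assuming they are built as ordinary Cargo examples (the actual target names are not visible in this diff):

    # hypothetical example name; substitute whatever bin/example target the crate defines
    cargo run --release --example ring -- 100000 200 throughput
    cargo run --release --example ring -- 100000 200 latency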

View File

@ -27,7 +27,6 @@ use pat::lit;
 enum SymbolVariant<'a> {
 Normal(&'a str),
-#[allow(dead_code)] // otherwise we get 'warning: field `0` is never read'
 Binder(&'a str),
 Substitution(&'a str),
 Discard,
@ -36,7 +35,7 @@ enum SymbolVariant<'a> {
 fn compile_sequence_members(vs: &[IOValue]) -> Vec<TokenStream> {
 vs.iter().enumerate().map(|(i, f)| {
 let p = compile_pattern(f);
-quote!((syndicate::value::Value::from(#i).wrap(), #p))
+quote!((#i .into(), #p))
 }).collect::<Vec<_>>()
 }
@ -80,6 +79,10 @@ impl ValueCompiler {
 match v.value() {
 Value::Boolean(b) =>
 quote!(#V_::Value::from(#b).wrap()),
+Value::Float(f) => {
+let f = f.0;
+quote!(#V_::Value::from(#f).wrap())
+}
 Value::Double(d) => {
 let d = d.0;
 quote!(#V_::Value::from(#d).wrap())
@ -151,14 +154,16 @@ fn compile_pattern(v: &IOValue) -> TokenStream {
 #[allow(non_snake_case)]
 let V_: TokenStream = quote!(syndicate::value);
 #[allow(non_snake_case)]
-let MapFrom_: TokenStream = quote!(<#V_::Map<_, _>>::from);
+let MapFromIterator_: TokenStream = quote!(<#V_::Map<_, _> as std::iter::FromIterator<_>>::from_iter);
 match v.value() {
 Value::Symbol(s) => match analyze_symbol(&s, true) {
 SymbolVariant::Binder(_) =>
-quote!(#P_::Pattern::Bind{ pattern: Box::new(#P_::Pattern::Discard) }),
+quote!(#P_::Pattern::DBind(Box::new(#P_::DBind {
+pattern: #P_::Pattern::DDiscard(Box::new(#P_::DDiscard))
+}))),
 SymbolVariant::Discard =>
-quote!(#P_::Pattern::Discard),
+quote!(#P_::Pattern::DDiscard(Box::new(#P_::DDiscard))),
 SymbolVariant::Substitution(s) =>
 lit(Ident::new(s, Span::call_site())),
 SymbolVariant::Normal(_) =>
@ -170,7 +175,9 @@ fn compile_pattern(v: &IOValue) -> TokenStream {
 Some(label) =>
 if label.starts_with("$") && r.arity() == 1 {
 let nested = compile_pattern(&r.fields()[0]);
-quote!(#P_::Pattern::Bind{ pattern: Box::new(#nested) })
+quote!(#P_::Pattern::DBind(Box::new(#P_::DBind {
+pattern: #nested
+})))
 } else {
 let label_stx = if label.starts_with("=") {
 let id = Ident::new(&label[1..], Span::call_site());
@ -179,19 +186,18 @@ fn compile_pattern(v: &IOValue) -> TokenStream {
 quote!(#V_::Value::symbol(#label).wrap())
 };
 let members = compile_sequence_members(r.fields());
-quote!(#P_::Pattern::Group {
-type_: Box::new(#P_::GroupType::Rec { label: #label_stx }),
-entries: #MapFrom_([#(#members),*]),
-})
+quote!(#P_::Pattern::DCompound(Box::new(#P_::DCompound::Rec {
+label: #label_stx,
+fields: vec![#(#members),*],
+})))
 }
 }
 }
 Value::Sequence(vs) => {
 let members = compile_sequence_members(vs);
-quote!(#P_::Pattern::Group {
-type_: Box::new(#P_::GroupType::Arr),
-entries: #MapFrom_([#(#members),*]),
-})
+quote!(#P_::Pattern::DCompound(Box::new(#P_::DCompound::Arr {
+items: vec![#(#members),*],
+})))
 }
 Value::Set(_) =>
 panic!("Cannot match sets in patterns"),
@ -201,10 +207,9 @@ fn compile_pattern(v: &IOValue) -> TokenStream {
 let v = compile_pattern(v);
 quote!((#k, #v))
 }).collect::<Vec<_>>();
-quote!(#P_::Pattern::Group {
-type_: Box::new(#P_::GroupType::Dict),
-entries: #MapFrom_([#(#members),*]),
-})
+quote!(#P_::Pattern::DCompound(Box::new(#P_::DCompound::Dict {
+entries: #MapFromIterator_(vec![#(#members),*])
+})))
 }
 _ => lit(ValueCompiler::for_patterns().compile(v)),
 }

View File

@ -15,9 +15,10 @@ pub fn lit<T: ToTokens>(e: T) -> TokenStream2 {
 }
 fn compile_sequence_members(stxs: &Vec<Stx>) -> Result<Vec<TokenStream2>, &'static str> {
-stxs.iter().enumerate().map(|(i, stx)| {
-let p = to_pattern_expr(stx)?;
-Ok(quote!((syndicate::value::Value::from(#i).wrap(), #p)))
+stxs.iter().map(|stx| {
+// let p = to_pattern_expr(stx)?;
+// Ok(quote!(#p))
+to_pattern_expr(stx)
 }).collect()
 }
@ -27,7 +28,7 @@ pub fn to_pattern_expr(stx: &Stx) -> Result<TokenStream2, &'static str> {
 #[allow(non_snake_case)]
 let V_: TokenStream2 = quote!(syndicate::value);
 #[allow(non_snake_case)]
-let MapFrom_: TokenStream2 = quote!(<#V_::Map<_, _>>::from);
+let MapFromIterator_: TokenStream2 = quote!(<#V_::Map<_, _> as std::iter::FromIterator<_>>::from_iter);
 match stx {
 Stx::Atom(v) =>
@ -40,27 +41,26 @@ pub fn to_pattern_expr(stx: &Stx) -> Result<TokenStream2, &'static str> {
 None => to_pattern_expr(&Stx::Discard)?,
 }
 };
-Ok(quote!(#P_::Pattern::Bind { pattern: Box::new(#inner_pat_expr) }))
+Ok(quote!(#P_::Pattern::DBind(Box::new(#P_::DBind { pattern: #inner_pat_expr }))))
 }
 Stx::Subst(e) =>
 Ok(lit(e)),
 Stx::Discard =>
-Ok(quote!(#P_::Pattern::Discard)),
+Ok(quote!(#P_::Pattern::DDiscard(Box::new(#P_::DDiscard)))),
 Stx::Rec(l, fs) => {
 let label = to_value_expr(&*l)?;
 let members = compile_sequence_members(fs)?;
-Ok(quote!(#P_::Pattern::Group {
-type_: Box::new(#P_::GroupType::Rec { label: #label }),
-entries: #MapFrom_([#(#members),*]),
-}))
+Ok(quote!(#P_::Pattern::DCompound(Box::new(#P_::DCompound::Rec {
+label: #label,
+fields: vec![#(#members),*],
+}))))
 },
 Stx::Seq(stxs) => {
 let members = compile_sequence_members(stxs)?;
-Ok(quote!(#P_::Pattern::Group {
-type_: Box::new(#P_::GroupType::Arr),
-entries: #MapFrom_([#(#members),*]),
-}))
+Ok(quote!(#P_::Pattern::DCompound(Box::new(#P_::DCompound::Arr {
+items: vec![#(#members),*],
+}))))
 }
 Stx::Set(_stxs) =>
 Err("Set literals not supported in patterns"),
@ -70,10 +70,9 @@ pub fn to_pattern_expr(stx: &Stx) -> Result<TokenStream2, &'static str> {
 let v = to_pattern_expr(v)?;
 Ok(quote!((#k, #v)))
 }).collect::<Result<Vec<_>, &'static str>>()?;
-Ok(quote!(#P_::Pattern::Group {
-type_: Box::new(#P_::GroupType::Dict),
-entries: #MapFrom_([#(#members),*])
-}))
+Ok(quote!(#P_::Pattern::DCompound(Box::new(#P_::DCompound::Dict {
+entries: #MapFromIterator_(vec![#(#members),*])
+}))))
 }
 }
 }

View File

@ -1,6 +1,5 @@
 use proc_macro2::Delimiter;
 use proc_macro2::LineColumn;
-use proc_macro2::Span;
 use proc_macro2::TokenStream;
 use syn::ExprLit;
@ -15,6 +14,7 @@ use syn::parse::Parser;
 use syn::parse::ParseStream;
 use syn::parse_str;
+use syndicate::value::Float;
 use syndicate::value::Double;
 use syndicate::value::IOValue;
 use syndicate::value::NestedValue;
@ -70,41 +70,24 @@ fn punct_char(c: Cursor) -> Option<(char, Cursor)> {
 c.punct().map(|(p, c)| (p.as_char(), c))
 }
-fn start_pos(s: Span) -> LineColumn {
-// We would like to write
-// s.start()
-// here, but until [1] is fixed (perhaps via [2]), we have to go the unsafe route
-// and assume we are in procedural macro context.
-// [1]: https://github.com/dtolnay/proc-macro2/issues/402
-// [2]: https://github.com/dtolnay/proc-macro2/pull/407
-let u = s.unwrap().start();
-LineColumn { column: u.column(), line: u.line() }
-}
-fn end_pos(s: Span) -> LineColumn {
-// See start_pos
-let u = s.unwrap().end();
-LineColumn { column: u.column(), line: u.line() }
-}
 fn parse_id(mut c: Cursor) -> Result<(String, Cursor)> {
 let mut id = String::new();
-let mut prev_pos = start_pos(c.span());
+let mut prev_pos = c.span().start();
 loop {
-if c.eof() || start_pos(c.span()) != prev_pos {
+if c.eof() || c.span().start() != prev_pos {
 return Ok((id, c));
 } else if let Some((p, next)) = c.punct() {
 match p.as_char() {
 '<' | '>' | '(' | ')' | '{' | '}' | '[' | ']' | ',' | ':' => return Ok((id, c)),
 ch => {
 id.push(ch);
-prev_pos = end_pos(c.span());
+prev_pos = c.span().end();
 c = next;
 }
 }
 } else if let Some((i, next)) = c.ident() {
 id.push_str(&i.to_string());
-prev_pos = end_pos(i.span());
+prev_pos = i.span().end();
 c = next;
 } else {
 return Ok((id, c));
@ -170,7 +153,7 @@ fn parse_kv(c: Cursor) -> Result<((Stx, Stx), Cursor)> {
 }
 fn adjacent_ident(pos: LineColumn, c: Cursor) -> (Option<Ident>, Cursor) {
-if start_pos(c.span()) != pos {
+if c.span().start() != pos {
 (None, c)
 } else if let Some((id, next)) = c.ident() {
 (Some(id), next)
@ -194,8 +177,8 @@ fn parse_generic<T: Parse>(mut c: Cursor) -> Option<(T, Cursor)> {
 // OK, because parse2 checks for end-of-stream, let's chop
 // the input at the position of the error and try again (!).
 let mut collected = Vec::new();
-let upto = start_pos(e.span());
-while !c.eof() && start_pos(c.span()) != upto {
+let upto = e.span().start();
+while !c.eof() && c.span().start() != upto {
 let (tt, next) = c.token_tree().unwrap();
 collected.push(tt);
 c = next;
@ -217,7 +200,7 @@ fn parse1(c: Cursor) -> Result<(Stx, Cursor)> {
 Ok((Stx::Rec(Box::new(q.remove(0)), q), c))
 }),
 '$' => {
-let (maybe_id, next) = adjacent_ident(end_pos(p.span()), next);
+let (maybe_id, next) = adjacent_ident(p.span().end(), next);
 let (maybe_type, next) = if let Some((':', next)) = punct_char(next) {
 match parse_generic::<Type>(next) {
 Some((t, next)) => (Some(t), next),
@ -265,7 +248,7 @@ fn parse1(c: Cursor) -> Result<(Stx, Cursor)> {
 IOValue::new(i.base10_parse::<i128>()?)
 }
 Lit::Float(f) => if f.suffix() == "f32" {
-IOValue::new(&Double(f.base10_parse::<f32>()? as f64))
+IOValue::new(&Float(f.base10_parse::<f32>()?))
 } else {
 IOValue::new(&Double(f.base10_parse::<f64>()?))
 }

View File

@ -50,6 +50,10 @@ pub fn value_to_value_expr(v: &IOValue) -> TokenStream2 {
 match v.value() {
 Value::Boolean(b) =>
 quote!(#V_::Value::from(#b).wrap()),
+Value::Float(f) => {
+let f = f.0;
+quote!(#V_::Value::from(#f).wrap())
+}
 Value::Double(d) => {
 let d = d.0;
 quote!(#V_::Value::from(#d).wrap())

View File

@ -1,15 +0,0 @@
{
"folders": [
{
"path": "."
},
{
"path": "../syndicate-protocols"
}
],
"settings": {
"files.exclude": {
"target": true
}
}
}

View File

@ -1,23 +0,0 @@
[package]
name = "syndicate-schema-plugin"
version = "0.10.1"
authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"]
edition = "2018"
description = "Support for using Preserves Schema with Syndicate macros."
homepage = "https://syndicate-lang.org/"
repository = "https://git.syndicate-lang.org/syndicate-lang/syndicate-rs"
license = "Apache-2.0"
[lib]
[build-dependencies]
preserves-schema = "5.995"
[dependencies]
preserves = "4.995"
preserves-schema = "5.995"
lazy_static = "1.4"
[package.metadata.workspaces]
independent = true

View File

@ -1,15 +0,0 @@
use preserves_schema::compiler::*;
fn main() -> std::io::Result<()> {
let buildroot = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
let mut gen_dir = buildroot.clone();
gen_dir.push("src/schemas");
let mut c = CompilerConfig::new("crate::schemas".to_owned());
c.add_external_module(ExternalModule::new(vec!["EntityRef".to_owned()], "crate::placeholder"));
let inputs = expand_inputs(&vec!["./schema-bundle.bin".to_owned()])?;
c.load_schemas_and_bundles(&inputs, &vec![])?;
compile(&c, &mut CodeCollector::files(gen_dir))
}

View File

@ -1,44 +0,0 @@
(Binary file deleted in this comparison: the Preserves schema bundle vendored by syndicate-schema-plugin, apparently schema-bundle.bin. Its binary content does not reproduce as text; it bundles the rpc, tcp, http, noise, timer, trace, stdenv, stream, sturdy, worker, service, protocol, dataspace, gatekeeper, transportAddress and dataspacePatterns schema modules.)

View File

@ -1,25 +0,0 @@
use std::sync::Arc;
use preserves::value::ArcValue;
use preserves::value::Domain;
mod schemas {
//! Auto-generated codecs for [Syndicate protocol
//! schemas](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/branch/main/schemas).
include!(concat!(env!("OUT_DIR"), "/src/schemas/mod.rs"));
}
mod placeholder {
pub type Cap = super::Cap;
}
#[derive(Debug, Clone, Hash, Ord, Eq, PartialOrd, PartialEq)]
pub enum Cap {}
impl Domain for Cap {}
preserves_schema::define_language!(language(): Language<ArcValue<Arc<Cap>>> {
syndicate: schemas::Language,
});
mod pattern;
mod pattern_plugin;
pub use pattern_plugin::PatternPlugin;
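
The crate removed above exists so that other crates' build scripts can generate wildcard_dataspace_pattern() constructors while compiling their Preserves schemas; the uninhabited Cap enum is only a placeholder embedded-value domain, which lets the plugin instantiate the schema language without depending on the actor runtime. A minimal sketch of how a downstream build.rs hooks the plugin in (mirroring the syndicate-server build.rs further down in this diff; illustrative, not a complete build script):

// build.rs (sketch)
use preserves_schema::compiler::*;

fn main() -> std::io::Result<()> {
    let buildroot = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
    let mut gen_dir = buildroot.clone();
    gen_dir.push("src/schemas");

    let mut c = CompilerConfig::new("crate::schemas".to_owned());
    // Registering the plugin makes every generated schema definition also carry a
    // wildcard_dataspace_pattern() constructor.
    c.plugins.push(Box::new(syndicate_schema_plugin::PatternPlugin::new()));
    let inputs = expand_inputs(&vec!["protocols/schema-bundle.bin".to_owned()])?;
    c.load_schemas_and_bundles(&inputs, &vec![])?;
    compile(&c, &mut CodeCollector::files(gen_dir))
}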

View File

@ -1,35 +0,0 @@
use crate::schemas::dataspace_patterns::*;
use preserves::value::NestedValue;
use preserves::value::Value;
pub fn lift_literal<N: NestedValue>(v: &N) -> Pattern<N> {
match v.value() {
Value::Boolean(b) => Pattern::Lit { value: Box::new(AnyAtom::Bool(*b)) },
Value::Double(d) => Pattern::Lit { value: Box::new(AnyAtom::Double(*d)) },
Value::SignedInteger(i) => Pattern::Lit { value: Box::new(AnyAtom::Int(i.clone())) },
Value::String(s) => Pattern::Lit { value: Box::new(AnyAtom::String(s.clone())) },
Value::ByteString(bs) => Pattern::Lit { value: Box::new(AnyAtom::Bytes(bs.clone())) },
Value::Symbol(s) => Pattern::Lit { value: Box::new(AnyAtom::Symbol(s.clone())) },
Value::Record(r) => Pattern::Group {
type_: Box::new(GroupType::Rec { label: r.label().clone() }),
entries: r.fields().iter().enumerate()
.map(|(i, v)| (N::new(i), lift_literal(v)))
.collect(),
},
Value::Sequence(items) => Pattern::Group {
type_: Box::new(GroupType::Arr),
entries: items.iter().enumerate()
.map(|(i, v)| (N::new(i), lift_literal(v)))
.collect(),
},
Value::Set(_members) => panic!("Cannot express literal set in pattern"),
Value::Dictionary(entries) => Pattern::Group {
type_: Box::new(GroupType::Dict),
entries: entries.iter()
.map(|(k, v)| (k.clone(), lift_literal(v)))
.collect(),
},
Value::Embedded(e) => Pattern::Lit { value: Box::new(AnyAtom::Embedded(e.clone())) },
}
}
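
lift_literal, above, embeds a concrete Preserves value into the dataspace-pattern language: atoms become lit patterns; records, sequences and dictionaries become group patterns keyed by field index or dictionary key; and sets are rejected. The resulting pattern therefore matches exactly the value it was built from. A small illustration, assuming the generic signature shown above and AnyValue (from the syndicate crate) as the value type:

use syndicate::value::AnyValue;

// An atom lifts to a lit pattern, so this matches exactly the symbol ping.
fn ping_pattern() -> Pattern<AnyValue> {
    lift_literal(&AnyValue::symbol("ping"))
}
// A compound value such as <Present 42> would instead lift to
// <group <rec Present> {0: <lit 42>}>, again matching only that exact value.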

View File

@ -1,176 +0,0 @@
use preserves_schema::*;
use preserves_schema::compiler::*;
use preserves_schema::compiler::context::ModuleContext;
use preserves_schema::compiler::types::definition_type;
use preserves_schema::compiler::types::Purpose;
use preserves_schema::gen::schema::*;
use preserves_schema::syntax::block::escape_string;
use preserves_schema::syntax::block::constructors::*;
use preserves::value::IOValue;
use preserves::value::Map;
use preserves::value::NestedValue;
use std::iter::FromIterator;
use crate::pattern::lift_literal;
use crate::schemas::dataspace_patterns as P;
#[derive(Debug)]
pub struct PatternPlugin {
pub syndicate_crate: String,
}
impl PatternPlugin {
pub fn new() -> Self {
PatternPlugin {
syndicate_crate: "syndicate".to_string(),
}
}
}
type WalkState<'a, 'm, 'b> =
preserves_schema::compiler::cycles::WalkState<&'a ModuleContext<'m, 'b>>;
impl Plugin for PatternPlugin {
fn generate_definition(
&self,
ctxt: &mut ModuleContext,
definition_name: &str,
definition: &Definition,
) {
if ctxt.mode == context::ModuleContextMode::TargetGeneric {
let mut s = WalkState::new(ctxt, ctxt.module_path.clone());
if let Some(p) = definition.wc(&mut s) {
let ty = definition_type(&ctxt.module_path,
Purpose::Codegen,
definition_name,
definition);
let v = crate::language().unparse(&p);
let v = preserves_schema::support::preserves::value::TextWriter::encode(
&mut preserves_schema::support::preserves::value::NoEmbeddedDomainCodec,
&v).unwrap();
ctxt.define_type(item(seq![
"impl",
ty.generic_decl(ctxt),
" ",
names::render_constructor(definition_name),
ty.generic_arg(ctxt),
" ", codeblock![
seq!["#[allow(unused)] pub fn wildcard_dataspace_pattern() ",
seq!["-> ", self.syndicate_crate.clone(), "::schemas::dataspace_patterns::Pattern "],
codeblock![
seq!["use ", self.syndicate_crate.clone(), "::schemas::dataspace_patterns::*;"],
"use preserves_schema::Codec;",
seq!["let _v = ", self.syndicate_crate.clone(), "::value::text::from_str(",
escape_string(&v),
", ", self.syndicate_crate.clone(), "::value::ViaCodec::new(",
self.syndicate_crate.clone(), "::value::NoEmbeddedDomainCodec)).unwrap();"],
seq![self.syndicate_crate.clone(), "::language().parse(&_v).unwrap()"]]]]]));
}
}
}
}
fn discard() -> P::Pattern {
P::Pattern::Discard
}
trait WildcardPattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern>;
}
impl WildcardPattern for Definition {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
Definition::Or { .. } => None,
Definition::And { .. } => None,
Definition::Pattern(p) => p.wc(s),
}
}
}
impl WildcardPattern for Pattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
Pattern::CompoundPattern(p) => p.wc(s),
Pattern::SimplePattern(p) => p.wc(s),
}
}
}
fn from_io(v: &IOValue) -> Option<P::_Any> {
Some(v.value().copy_via(&mut |_| Err(())).ok()?.wrap())
}
impl WildcardPattern for CompoundPattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
CompoundPattern::Tuple { patterns } |
CompoundPattern::TuplePrefix { fixed: patterns, .. }=>
Some(P::Pattern::Group {
type_: Box::new(P::GroupType::Arr),
entries: patterns.iter().enumerate()
.map(|(i, p)| Some((P::_Any::new(i), unname(p).wc(s)?)))
.collect::<Option<Map<P::_Any, P::Pattern>>>()?,
}),
CompoundPattern::Dict { entries } =>
Some(P::Pattern::Group {
type_: Box::new(P::GroupType::Dict),
entries: Map::from_iter(
entries.0.iter()
.map(|(k, p)| Some((from_io(k)?, unname_simple(p).wc(s)?)))
.filter(|e| discard() != e.as_ref().unwrap().1)
.collect::<Option<Vec<(P::_Any, P::Pattern)>>>()?
.into_iter()),
}),
CompoundPattern::Rec { label, fields } => match (unname(label), unname(fields)) {
(Pattern::SimplePattern(label), Pattern::CompoundPattern(fields)) =>
match (*label, *fields) {
(SimplePattern::Lit { value }, CompoundPattern::Tuple { patterns }) =>
Some(P::Pattern::Group{
type_: Box::new(P::GroupType::Rec { label: from_io(&value)? }),
entries: patterns.iter().enumerate()
.map(|(i, p)| Some((P::_Any::new(i), unname(p).wc(s)?)))
.collect::<Option<Map<P::_Any, P::Pattern>>>()?,
}),
_ => None,
},
_ => None,
},
}
}
}
impl WildcardPattern for SimplePattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
SimplePattern::Any |
SimplePattern::Atom { .. } |
SimplePattern::Embedded { .. } |
SimplePattern::Seqof { .. } |
SimplePattern::Setof { .. } |
SimplePattern::Dictof { .. } => Some(discard()),
SimplePattern::Lit { value } => Some(lift_literal(&from_io(value)?)),
SimplePattern::Ref(r) => s.cycle_check(
r,
|ctxt, r| ctxt.bundle.lookup_definition(r).map(|v| v.0),
|s, d| d.and_then(|d| d.wc(s)).or_else(|| Some(discard())),
|| Some(discard())),
}
}
}
fn unname(np: &NamedPattern) -> Pattern {
match np {
NamedPattern::Anonymous(p) => (**p).clone(),
NamedPattern::Named(b) => Pattern::SimplePattern(Box::new(b.pattern.clone())),
}
}
fn unname_simple(np: &NamedSimplePattern) -> &SimplePattern {
match np {
NamedSimplePattern::Anonymous(p) => p,
NamedSimplePattern::Named(b) => &b.pattern,
}
}

View File

@ -1,6 +1,6 @@
[package] [package]
name = "syndicate-server" name = "syndicate-server"
version = "0.46.0" version = "0.30.0"
authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"] authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"]
edition = "2018" edition = "2018"
@ -9,20 +9,14 @@ homepage = "https://syndicate-lang.org/"
repository = "https://git.syndicate-lang.org/syndicate-lang/syndicate-rs" repository = "https://git.syndicate-lang.org/syndicate-lang/syndicate-rs"
license = "Apache-2.0" license = "Apache-2.0"
default-run = "syndicate-server"
[features]
jemalloc = ["dep:tikv-jemallocator"]
[build-dependencies] [build-dependencies]
preserves-schema = "5.995" preserves-schema = "3.2"
syndicate = { path = "../syndicate", version = "0.41.0"} syndicate = { path = "../syndicate", version = "0.27.0"}
syndicate-schema-plugin = { path = "../syndicate-schema-plugin", version = "0.10.0"}
[dependencies] [dependencies]
preserves-schema = "5.995" preserves-schema = "3.2"
syndicate = { path = "../syndicate", version = "0.41.0"} syndicate = { path = "../syndicate", version = "0.27.0"}
syndicate-macros = { path = "../syndicate-macros", version = "0.33.0"} syndicate-macros = { path = "../syndicate-macros", version = "0.22.0"}
chrono = "0.4" chrono = "0.4"
futures = "0.3" futures = "0.3"
@ -32,18 +26,15 @@ noise-rust-crypto = "0.5"
notify = "4.0" notify = "4.0"
structopt = "0.3" structopt = "0.3"
tikv-jemallocator = { version = "0.5.0", optional = true } tungstenite = "0.13"
tokio-tungstenite = "0.14"
tokio = { version = "1.10", features = ["io-std", "time", "process"] } tokio = { version = "1.10", features = ["io-std", "time", "process"] }
tokio-util = "0.6" tokio-util = "0.6"
tokio-stream = "0.1"
tracing = "0.1" tracing = "0.1"
tracing-subscriber = "0.2" tracing-subscriber = "0.2"
tracing-futures = "0.2" tracing-futures = "0.2"
hyper = { version = "0.14.27", features = ["server", "http1", "stream"] }
hyper-tungstenite = "0.11.1"
parking_lot = "0.12.1"
[package.metadata.workspaces] [package.metadata.workspaces]
independent = true independent = true

View File

@ -13,7 +13,7 @@ inotifytest:
binary: binary-release binary: binary-release
binary-release: binary-release:
cargo build --release --all-targets --features jemalloc cargo build --release --all-targets
binary-debug: binary-debug:
cargo build --all-targets cargo build --all-targets

View File

@ -1,13 +1,168 @@
use preserves_schema::compiler::*; use preserves_schema::compiler::*;
mod pattern_plugin {
use preserves_schema::*;
use preserves_schema::compiler::*;
use preserves_schema::compiler::context::ModuleContext;
use preserves_schema::gen::schema::*;
use preserves_schema::syntax::block::escape_string;
use preserves_schema::syntax::block::constructors::*;
use std::iter::FromIterator;
use syndicate::pattern::lift_literal;
use syndicate::schemas::dataspace_patterns as P;
use syndicate::value::IOValue;
use syndicate::value::Map;
use syndicate::value::NestedValue;
#[derive(Debug)]
pub struct PatternPlugin;
type WalkState<'a, 'm, 'b> =
preserves_schema::compiler::cycles::WalkState<&'a ModuleContext<'m, 'b>>;
impl Plugin for PatternPlugin {
fn generate_definition(
&self,
ctxt: &mut ModuleContext,
definition_name: &str,
definition: &Definition,
) {
if ctxt.mode == context::ModuleContextMode::TargetGeneric {
let mut s = WalkState::new(ctxt, ctxt.module_path.clone());
if let Some(p) = definition.wc(&mut s) {
let v = syndicate::language().unparse(&p);
let v = preserves_schema::support::preserves::value::TextWriter::encode(
&mut preserves_schema::support::preserves::value::NoEmbeddedDomainCodec,
&v).unwrap();
ctxt.define_type(item(seq![
"impl ", definition_name.to_owned(), " ", codeblock![
seq!["#[allow(unused)] pub fn wildcard_dataspace_pattern() ",
"-> syndicate::schemas::dataspace_patterns::Pattern ",
codeblock![
"use syndicate::schemas::dataspace_patterns::*;",
"use preserves_schema::Codec;",
seq!["let _v = syndicate::value::text::from_str(",
escape_string(&v),
", syndicate::value::ViaCodec::new(syndicate::value::NoEmbeddedDomainCodec)).unwrap();"],
"syndicate::language().parse(&_v).unwrap()"]]]]));
}
}
}
}
fn discard() -> P::Pattern {
P::Pattern::DDiscard(Box::new(P::DDiscard))
}
trait WildcardPattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern>;
}
impl WildcardPattern for Definition {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
Definition::Or { .. } => None,
Definition::And { .. } => None,
Definition::Pattern(p) => p.wc(s),
}
}
}
impl WildcardPattern for Pattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
Pattern::CompoundPattern(p) => p.wc(s),
Pattern::SimplePattern(p) => p.wc(s),
}
}
}
fn from_io(v: &IOValue) -> Option<P::_Any> {
Some(v.value().copy_via(&mut |_| Err(())).ok()?.wrap())
}
impl WildcardPattern for CompoundPattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
CompoundPattern::Tuple { patterns } =>
Some(P::Pattern::DCompound(Box::new(P::DCompound::Arr {
items: patterns.iter()
.map(|p| unname(p).wc(s))
.collect::<Option<Vec<P::Pattern>>>()?,
}))),
CompoundPattern::TuplePrefix { .. } =>
Some(discard()),
CompoundPattern::Dict { entries } =>
Some(P::Pattern::DCompound(Box::new(P::DCompound::Dict {
entries: Map::from_iter(
entries.0.iter()
.map(|(k, p)| Some((from_io(k)?, unname_simple(p).wc(s)?)))
.filter(|e| discard() != e.as_ref().unwrap().1)
.collect::<Option<Vec<(P::_Any, P::Pattern)>>>()?
.into_iter()),
}))),
CompoundPattern::Rec { label, fields } => match (unname(label), unname(fields)) {
(Pattern::SimplePattern(label), Pattern::CompoundPattern(fields)) =>
match (*label, *fields) {
(SimplePattern::Lit { value }, CompoundPattern::Tuple { patterns }) =>
Some(P::Pattern::DCompound(Box::new(P::DCompound::Rec {
label: from_io(&value)?,
fields: patterns.iter()
.map(|p| unname(p).wc(s))
.collect::<Option<Vec<P::Pattern>>>()?,
}))),
_ => None,
},
_ => None,
},
}
}
}
impl WildcardPattern for SimplePattern {
fn wc(&self, s: &mut WalkState) -> Option<P::Pattern> {
match self {
SimplePattern::Any |
SimplePattern::Atom { .. } |
SimplePattern::Embedded { .. } |
SimplePattern::Seqof { .. } |
SimplePattern::Setof { .. } |
SimplePattern::Dictof { .. } => Some(discard()),
SimplePattern::Lit { value } => Some(lift_literal(&from_io(value)?)),
SimplePattern::Ref(r) => s.cycle_check(
r,
|ctxt, r| ctxt.bundle.lookup_definition(r).map(|v| v.0),
|s, d| d.and_then(|d| d.wc(s)).or_else(|| Some(discard())),
|| Some(discard())),
}
}
}
fn unname(np: &NamedPattern) -> Pattern {
match np {
NamedPattern::Anonymous(p) => (**p).clone(),
NamedPattern::Named(b) => Pattern::SimplePattern(Box::new(b.pattern.clone())),
}
}
fn unname_simple(np: &NamedSimplePattern) -> &SimplePattern {
match np {
NamedSimplePattern::Anonymous(p) => p,
NamedSimplePattern::Named(b) => &b.pattern,
}
}
}
fn main() -> std::io::Result<()> { fn main() -> std::io::Result<()> {
let buildroot = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap()); let buildroot = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
let mut gen_dir = buildroot.clone(); let mut gen_dir = buildroot.clone();
gen_dir.push("src/schemas"); gen_dir.push("src/schemas");
let mut c = CompilerConfig::new("crate::schemas".to_owned()); let mut c = CompilerConfig::new(gen_dir, "crate::schemas".to_owned());
c.plugins.push(Box::new(syndicate_schema_plugin::PatternPlugin::new())); c.plugins.push(Box::new(pattern_plugin::PatternPlugin));
c.add_external_module(ExternalModule::new(vec!["EntityRef".to_owned()], "syndicate::actor")); c.add_external_module(ExternalModule::new(vec!["EntityRef".to_owned()], "syndicate::actor"));
c.add_external_module( c.add_external_module(
ExternalModule::new(vec!["TransportAddress".to_owned()], ExternalModule::new(vec!["TransportAddress".to_owned()],
@ -28,5 +183,5 @@ fn main() -> std::io::Result<()> {
let inputs = expand_inputs(&vec!["protocols/schema-bundle.bin".to_owned()])?; let inputs = expand_inputs(&vec!["protocols/schema-bundle.bin".to_owned()])?;
c.load_schemas_and_bundles(&inputs, &vec![])?; c.load_schemas_and_bundles(&inputs, &vec![])?;
c.load_xref_bin("syndicate", syndicate::schemas::_bundle())?; c.load_xref_bin("syndicate", syndicate::schemas::_bundle())?;
compile(&c, &mut CodeCollector::files(gen_dir)) compile(&c)
} }
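
For reference, the code this plugin splices into the generated schema modules has roughly the following shape (reconstructed from the seq!/codeblock! fragments above; the type name Foo and the embedded pattern text are placeholders, and one such impl is emitted per definition in the bundle):

impl Foo {
    #[allow(unused)]
    pub fn wildcard_dataspace_pattern() -> syndicate::schemas::dataspace_patterns::Pattern {
        use syndicate::schemas::dataspace_patterns::*;
        use preserves_schema::Codec;
        // The pattern is baked in as Preserves text and re-parsed at runtime.
        let _v = syndicate::value::text::from_str(
            "<group <rec Foo> {0: <_>}>", // placeholder: the actual text depends on the definition
            syndicate::value::ViaCodec::new(syndicate::value::NoEmbeddedDomainCodec)).unwrap();
        syndicate::language().parse(&_v).unwrap()
    }
}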

View File

@ -15,7 +15,7 @@ use core::time::Duration;
#[derive(Clone, Debug, StructOpt)] #[derive(Clone, Debug, StructOpt)]
pub struct Config { pub struct Config {
#[structopt(short = "d", default_value = "b4b303726566b7b3036f6964b10973796e646963617465b303736967b21069ca300c1dbfa08fba692102dd82311a8484")] #[structopt(short = "d", default_value = "b4b303726566b10973796e646963617465b584b21069ca300c1dbfa08fba692102dd82311a84")]
dataspace: String, dataspace: String,
} }
@ -49,7 +49,8 @@ async fn main() -> ActorResult {
})?; })?;
Ok(None) Ok(None)
}) });
Ok(())
}).await??; }).await??;
Ok(()) Ok(())
} }

View File

@ -26,7 +26,7 @@ mod dirty;
#[derive(Clone, Debug, StructOpt)] #[derive(Clone, Debug, StructOpt)]
pub struct Config { pub struct Config {
#[structopt(short = "d", default_value = "b4b303726566b7b3036f6964b10973796e646963617465b303736967b21069ca300c1dbfa08fba692102dd82311a8484")] #[structopt(short = "d", default_value = "b4b303726566b10973796e646963617465b584b21069ca300c1dbfa08fba692102dd82311a84")]
dataspace: String, dataspace: String,
} }
@ -58,10 +58,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut buf = [0; 131072]; let mut buf = [0; 131072];
let turn_size = { let turn_size = {
let n = stream.read(&mut buf)?; stream.read(&mut buf)?;
if n == 0 {
return Ok(());
}
let mut src = BytesBinarySource::new(&buf); let mut src = BytesBinarySource::new(&buf);
src.packed_iovalues().demand_next(false)?; src.packed_iovalues().demand_next(false)?;
src.index src.index

View File

@ -25,7 +25,7 @@ pub struct Config {
#[structopt(short = "b", default_value = "0")] #[structopt(short = "b", default_value = "0")]
bytes_padding: usize, bytes_padding: usize,
#[structopt(short = "d", default_value = "b4b303726566b7b3036f6964b10973796e646963617465b303736967b21069ca300c1dbfa08fba692102dd82311a8484")] #[structopt(short = "d", default_value = "b4b303726566b10973796e646963617465b584b21069ca300c1dbfa08fba692102dd82311a84")]
dataspace: String, dataspace: String,
} }

View File

@ -16,16 +16,15 @@ pub fn dirty_resolve(stream: &mut TcpStream, dataspace: &str) -> Result<(), Box<
let iolang = Language::<IOValue>::default(); let iolang = Language::<IOValue>::default();
let sturdyref = sturdy::SturdyRef::from_hex(dataspace)?; let sturdyref = sturdy::SturdyRef::from_hex(dataspace)?;
let sturdyref = iolang.parse::<gatekeeper::Step<IOValue>>( let sturdyref = iolang.parse(&syndicate::language().unparse(&sturdyref)
&syndicate::language().unparse(&sturdyref) .copy_via(&mut |_| Err("no!"))?)?;
.copy_via(&mut |_| Err("no!"))?)?;
let resolve_turn = P::Turn(vec![ let resolve_turn = P::Turn(vec![
P::TurnEvent { P::TurnEvent {
oid: P::Oid(0.into()), oid: P::Oid(0.into()),
event: P::Event::Assert(Box::new(P::Assert { event: P::Event::Assert(Box::new(P::Assert {
assertion: P::Assertion(iolang.unparse(&gatekeeper::Resolve::<IOValue> { assertion: P::Assertion(iolang.unparse(&gatekeeper::Resolve::<IOValue> {
step: sturdyref, sturdyref,
observer: iolang.unparse(&sturdy::WireRef::Mine { observer: iolang.unparse(&sturdy::WireRef::Mine {
oid: Box::new(sturdy::Oid(0.into())), oid: Box::new(sturdy::Oid(0.into())),
}), }),

View File

@ -43,7 +43,7 @@ pub struct Config {
#[structopt(subcommand)] #[structopt(subcommand)]
mode: PingPongMode, mode: PingPongMode,
#[structopt(short = "d", default_value = "b4b303726566b7b3036f6964b10973796e646963617465b303736967b21069ca300c1dbfa08fba692102dd82311a8484")] #[structopt(short = "d", default_value = "b4b303726566b10973796e646963617465b584b21069ca300c1dbfa08fba692102dd82311a84")]
dataspace: String, dataspace: String,
} }
@ -177,7 +177,7 @@ async fn main() -> ActorResult {
})?; })?;
if let PingPongMode::Ping(c) = &config.mode { if let PingPongMode::Ping(c) = &config.mode {
let facet = t.facet_ref(); let facet = t.facet.clone();
let turn_count = c.turn_count; let turn_count = c.turn_count;
let action_count = c.action_count; let action_count = c.action_count;
let account = Arc::clone(t.account()); let account = Arc::clone(t.account());
@ -199,7 +199,8 @@ async fn main() -> ActorResult {
} }
Ok(None) Ok(None)
}) });
Ok(())
}).await??; }).await??;
Ok(()) Ok(())
} }

View File

@ -16,7 +16,7 @@ pub struct Config {
#[structopt(short = "b", default_value = "0")] #[structopt(short = "b", default_value = "0")]
bytes_padding: usize, bytes_padding: usize,
#[structopt(short = "d", default_value = "b4b303726566b7b3036f6964b10973796e646963617465b303736967b21069ca300c1dbfa08fba692102dd82311a8484")] #[structopt(short = "d", default_value = "b4b303726566b10973796e646963617465b584b21069ca300c1dbfa08fba692102dd82311a84")]
dataspace: String, dataspace: String,
} }
@ -28,7 +28,7 @@ async fn main() -> ActorResult {
let (i, o) = TcpStream::connect("127.0.0.1:9001").await?.into_split(); let (i, o) = TcpStream::connect("127.0.0.1:9001").await?.into_split();
Actor::top(None, |t| { Actor::top(None, |t| {
relay::connect_stream(t, i, o, false, sturdyref, (), move |_state, t, ds| { relay::connect_stream(t, i, o, false, sturdyref, (), move |_state, t, ds| {
let facet = t.facet_ref(); let facet = t.facet.clone();
let padding = AnyValue::new(&vec![0u8; config.bytes_padding][..]); let padding = AnyValue::new(&vec![0u8; config.bytes_padding][..]);
let action_count = config.action_count; let action_count = config.action_count;
let account = Account::new(None, None); let account = Account::new(None, None);
@ -46,7 +46,8 @@ async fn main() -> ActorResult {
} }
}); });
Ok(None) Ok(None)
}) });
Ok(())
}).await??; }).await??;
Ok(()) Ok(())
} }

View File

@ -15,7 +15,7 @@ use core::time::Duration;
#[derive(Clone, Debug, StructOpt)] #[derive(Clone, Debug, StructOpt)]
pub struct Config { pub struct Config {
#[structopt(short = "d", default_value = "b4b303726566b7b3036f6964b10973796e646963617465b303736967b21069ca300c1dbfa08fba692102dd82311a8484")] #[structopt(short = "d", default_value = "b4b303726566b10973796e646963617465b584b21069ca300c1dbfa08fba692102dd82311a84")]
dataspace: String, dataspace: String,
} }
@ -70,7 +70,8 @@ async fn main() -> ActorResult {
})?; })?;
Ok(None) Ok(None)
}) });
Ok(())
}).await??; }).await??;
Ok(()) Ok(())
} }

View File

@ -10,7 +10,7 @@ use tokio::net::TcpStream;
#[derive(Clone, Debug, StructOpt)] #[derive(Clone, Debug, StructOpt)]
pub struct Config { pub struct Config {
#[structopt(short = "d", default_value = "b4b303726566b7b3036f6964b10973796e646963617465b303736967b21069ca300c1dbfa08fba692102dd82311a8484")] #[structopt(short = "d", default_value = "b4b303726566b10973796e646963617465b584b21069ca300c1dbfa08fba692102dd82311a84")]
dataspace: String, dataspace: String,
} }
@ -22,7 +22,7 @@ async fn main() -> ActorResult {
let (i, o) = TcpStream::connect("127.0.0.1:9001").await?.into_split(); let (i, o) = TcpStream::connect("127.0.0.1:9001").await?.into_split();
Actor::top(None, |t| { Actor::top(None, |t| {
relay::connect_stream(t, i, o, false, sturdyref, (), move |_state, t, ds| { relay::connect_stream(t, i, o, false, sturdyref, (), move |_state, t, ds| {
let facet = t.facet_ref(); let facet = t.facet.clone();
let account = Account::new(None, None); let account = Account::new(None, None);
t.linked_task(Some(AnyValue::symbol("sender")), async move { t.linked_task(Some(AnyValue::symbol("sender")), async move {
let presence = rec![AnyValue::symbol("Present"), AnyValue::new(std::process::id())]; let presence = rec![AnyValue::symbol("Present"), AnyValue::new(std::process::id())];
@ -42,7 +42,8 @@ async fn main() -> ActorResult {
} }
}); });
Ok(None) Ok(None)
}) });
Ok(())
}).await??; }).await??;
Ok(()) Ok(())
} }

View File

@ -1,17 +1,12 @@
´³bundle·µ³control„´³schema·³version°³ definitions·³ ´³bundle·µ³ documentation„´³schema·³version³ definitions·³Url´³orµµ±present´³dict·³url´³named³url´³atom³String„„„„„µ±invalid´³dict·³url´³named³url³any„„„„µ±absent´³dict·„„„„„³IOList´³orµµ±bytes´³atom³
ExitServer´³rec´³lit³exit„´³tupleµ´³named³code´³atom³ SignedInteger„„„„„„³ embeddedType€„„µ³ documentation„´³schema·³version°³ definitions·³Url´³orµµ±present´³dict·³url´³named³url´³atom³String„„„„„µ±invalid´³dict·³url´³named³url³any„„„„µ±absent´³dict·„„„„„³IOList´³orµµ±bytes´³atom³ ByteString„„µ±string´³atom³String„„µ±nested´³seqof´³refµ„³IOList„„„„„³Metadata´³rec´³lit³metadata„´³tupleµ´³named³object³any„´³named³info´³dictof´³atom³Symbol„³any„„„„„³ Description´³orµµ±present´³dict·³ description´³named³ description´³refµ„³IOList„„„„„µ±invalid´³dict·³ description´³named³ description³any„„„„µ±absent´³dict·„„„„„„³ embeddedType€„„µ³ gatekeeperMux„´³schema·³version³ definitions·³API´³orµµ±Resolve´³refµ³
ByteString„„µ±string´³atom³String„„µ±nested´³seqof´³refµ„³IOList„„„„„³Metadata´³rec´³lit³metadata„´³tupleµ´³named³object³any„´³named³info´³dictof´³atom³Symbol„³any„„„„„³ Description´³orµµ±present´³dict·³ description´³named³ description´³refµ„³IOList„„„„„µ±invalid´³dict·³ description´³named³ description³any„„„„µ±absent´³dict·„„„„„„³ embeddedType€„„µ³externalServices„´³schema·³version°³ definitions·³Process´³orµµ±simple´³refµ„³ CommandLine„„µ±full´³refµ„³ FullProcess„„„„³Service´³refµ„³ DaemonService„³ClearEnv´³orµµ±present´³dict·³clearEnv´³named³clearEnv´³atom³Boolean„„„„„µ±invalid´³dict·³clearEnv´³named³clearEnv³any„„„„µ±absent´³dict·„„„„„³EnvValue´³orµµ±set´³atom³String„„µ±remove´³lit€„„µ±invalid³any„„„³Protocol´³orµµ±none´³lit³none„„µ±binarySyndicate´³lit³application/syndicate„„µ± textSyndicate´³lit³text/syndicate„„„„³ gatekeeper„³Resolve„„µ±Connect´³refµ³noise„³Connect„„„„³ NoiseService´³rec´³lit³noise„´³tupleµ´³named³spec´³refµ³noise„³ NoiseSpec„„´³named³service´³embedded³any„„„„„³SecretKeyField´³orµµ±present´³dict·³ secretKey´³named³ secretKey´³atom³
ByteString„„„„„µ±invalid´³dict·³ secretKey´³named³ secretKey³any„„„„µ±absent´³dict·„„„„„³NoiseServiceSpec´³andµ´³named³base´³refµ³noise„³ NoiseSpec„„´³named³ secretKey´³refµ„³SecretKeyField„„„„„³ embeddedType€„„µ³externalServices„´³schema·³version³ definitions·³Process´³orµµ±simple´³refµ„³ CommandLine„„µ±full´³refµ„³ FullProcess„„„„³Service´³refµ„³ DaemonService„³ClearEnv´³orµµ±present´³dict·³clearEnv´³named³clearEnv´³atom³Boolean„„„„„µ±invalid´³dict·³clearEnv´³named³clearEnv³any„„„„µ±absent´³dict·„„„„„³EnvValue´³orµµ±set´³atom³String„„µ±remove´³lit€„„µ±invalid³any„„„³Protocol´³orµµ±none´³lit³none„„µ±binarySyndicate´³lit³application/syndicate„„µ± textSyndicate´³lit³text/syndicate„„„„³
ProcessDir´³orµµ±present´³dict·³dir´³named³dir´³atom³String„„„„„µ±invalid´³dict·³dir´³named³dir³any„„„„µ±absent´³dict·„„„„„³ ProcessDir´³orµµ±present´³dict·³dir´³named³dir´³atom³String„„„„„µ±invalid´³dict·³dir´³named³dir³any„„„„µ±absent´³dict·„„„„„³
ProcessEnv´³orµµ±present´³dict·³env´³named³env´³dictof´³refµ„³ EnvVariable„´³refµ„³EnvValue„„„„„„µ±invalid´³dict·³env´³named³env³any„„„„µ±absent´³dict·„„„„„³ CommandLine´³orµµ±shell´³atom³String„„µ±full´³refµ„³FullCommandLine„„„„³ EnvVariable´³orµµ±string´³atom³String„„µ±symbol´³atom³Symbol„„µ±invalid³any„„„³ FullProcess´³andµ´³dict·³argv´³named³argv´³refµ„³ CommandLine„„„„´³named³env´³refµ„³ ProcessEnv´³orµµ±present´³dict·³env´³named³env´³dictof´³refµ„³ EnvVariable„´³refµ„³EnvValue„„„„„„µ±invalid´³dict·³env´³named³env³any„„„„µ±absent´³dict·„„„„„³ CommandLine´³orµµ±shell´³atom³String„„µ±full´³refµ„³FullCommandLine„„„„³ EnvVariable´³orµµ±string´³atom³String„„µ±symbol´³atom³Symbol„„µ±invalid³any„„„³ FullProcess´³andµ´³dict·³argv´³named³argv´³refµ„³ CommandLine„„„„´³named³env´³refµ„³
ProcessEnv„„´³named³dir´³refµ„³ ProcessEnv„„´³named³dir´³refµ„³
ProcessDir„„´³named³clearEnv´³refµ„³ClearEnv„„„„³ ReadyOnStart´³orµµ±present´³dict·³ readyOnStart´³named³ readyOnStart´³atom³Boolean„„„„„µ±invalid´³dict·³ readyOnStart´³named³ readyOnStart³any„„„„µ±absent´³dict·„„„„„³ RestartField´³orµµ±present´³dict·³restart´³named³restart´³refµ„³ RestartPolicy„„„„„µ±invalid´³dict·³restart´³named³restart³any„„„„µ±absent´³dict·„„„„„³ DaemonProcess´³rec´³lit³daemon„´³tupleµ´³named³id³any„´³named³config´³refµ„³DaemonProcessSpec„„„„„³ DaemonService´³rec´³lit³daemon„´³tupleµ´³named³id³any„„„„³ ProtocolField´³orµµ±present´³dict·³protocol´³named³protocol´³refµ„³Protocol„„„„„µ±invalid´³dict·³protocol´³named³protocol³any„„„„µ±absent´³dict·„„„„„³ RestartPolicy´³orµµ±always´³lit³always„„µ±onError´³lit³on-error„„µ±all´³lit³all„„µ±never´³lit³never„„„„³FullCommandLine´³ tuplePrefixµ´³named³program´³atom³String„„„´³named³args´³seqof´³atom³String„„„„³DaemonProcessSpec´³orµµ±simple´³refµ„³ CommandLine„„µ±oneShot´³rec´³lit³one-shot„´³tupleµ´³named³setup´³refµ„³ CommandLine„„„„„„µ±full´³refµ„³FullDaemonProcess„„„„³FullDaemonProcess´³andµ´³named³process´³refµ„³ FullProcess„„´³named³ readyOnStart´³refµ„³ ReadyOnStart„„´³named³restart´³refµ„³ RestartField„„´³named³protocol´³refµ„³ ProtocolField„„„„„³ embeddedType´³refµ³ EntityRef„³Cap„„„µ³internalServices„´³schema·³version°³ definitions·³ ConfigEnv´³dictof´³atom³Symbol„³any„³ ProcessDir„„´³named³clearEnv´³refµ„³ClearEnv„„„„³ ReadyOnStart´³orµµ±present´³dict·³ readyOnStart´³named³ readyOnStart´³atom³Boolean„„„„„µ±invalid´³dict·³ readyOnStart´³named³ readyOnStart³any„„„„µ±absent´³dict·„„„„„³ RestartField´³orµµ±present´³dict·³restart´³named³restart´³refµ„³ RestartPolicy„„„„„µ±invalid´³dict·³restart´³named³restart³any„„„„µ±absent´³dict·„„„„„³ DaemonProcess´³rec´³lit³daemon„´³tupleµ´³named³id³any„´³named³config´³refµ„³DaemonProcessSpec„„„„„³ DaemonService´³rec´³lit³daemon„´³tupleµ´³named³id³any„„„„³ ProtocolField´³orµµ±present´³dict·³protocol´³named³protocol´³refµ„³Protocol„„„„„µ±invalid´³dict·³protocol´³named³protocol³any„„„„µ±absent´³dict·„„„„„³ RestartPolicy´³orµµ±always´³lit³always„„µ±onError´³lit³on-error„„µ±all´³lit³all„„µ±never´³lit³never„„„„³FullCommandLine´³ tuplePrefixµ´³named³program´³atom³String„„„´³named³args´³seqof´³atom³String„„„„³DaemonProcessSpec´³orµµ±simple´³refµ„³ CommandLine„„µ±oneShot´³rec´³lit³one-shot„´³tupleµ´³named³setup´³refµ„³ CommandLine„„„„„„µ±full´³refµ„³FullDaemonProcess„„„„³FullDaemonProcess´³andµ´³named³process´³refµ„³ FullProcess„„´³named³ readyOnStart´³refµ„³ ReadyOnStart„„´³named³restart´³refµ„³ RestartField„„´³named³protocol´³refµ„³ ProtocolField„„„„„³ embeddedType´³refµ³ EntityRef„³Cap„„„µ³internalServices„´³schema·³version³ definitions·³ ConfigEnv´³dictof´³atom³Symbol„³any„³ DebtReporter´³rec´³lit³ debt-reporter„´³tupleµ´³named³intervalSeconds´³atom³Double„„„„„³ ConfigWatcher´³rec´³lit³config-watcher„´³tupleµ´³named³path´³atom³String„„´³named³env´³refµ„³ ConfigEnv„„„„„³TcpRelayListener´³rec´³lit³relay-listener„´³tupleµ´³named³addr´³refµ³TransportAddress„³Tcp„„´³named³
Gatekeeper´³rec´³lit³
gatekeeper„´³tupleµ´³named³ bindspace´³embedded´³refµ³
gatekeeper„³Bind„„„„„„³
HttpRouter´³rec´³lit³ http-router„´³tupleµ´³named³httpd´³embedded³any„„„„„³ TcpWithHttp´³rec´³lit³relay-listener„´³tupleµ´³named³addr´³refµ³TransportAddress„³Tcp„„´³named³
gatekeeper´³embedded´³refµ³ gatekeeper´³embedded´³refµ³
gatekeeper„³Resolve„„„´³named³httpd´³embedded´³refµ³http„³ HttpContext„„„„„„³ DebtReporter´³rec´³lit³ debt-reporter„´³tupleµ´³named³intervalSeconds´³atom³Double„„„„„³ ConfigWatcher´³rec´³lit³config-watcher„´³tupleµ´³named³path´³atom³String„„´³named³env´³refµ„³ ConfigEnv„„„„„³TcpWithoutHttp´³rec´³lit³relay-listener„´³tupleµ´³named³addr´³refµ³TransportAddress„³Tcp„„´³named³ gatekeeper„³Resolve„„„„„„³UnixRelayListener´³rec´³lit³relay-listener„´³tupleµ´³named³addr´³refµ³TransportAddress„³Unix„„´³named³
gatekeeper´³embedded´³refµ³ gatekeeper´³embedded´³refµ³
gatekeeper„³Resolve„„„„„„³TcpRelayListener´³orµµ±TcpWithoutHttp´³refµ„³TcpWithoutHttp„„µ± TcpWithHttp´³refµ„³ TcpWithHttp„„„„³UnixRelayListener´³rec´³lit³relay-listener„´³tupleµ´³named³addr´³refµ³TransportAddress„³Unix„„´³named³ gatekeeper„³Resolve„„„„„„„³ embeddedType´³refµ³ EntityRef„³Cap„„„„„
gatekeeper´³embedded´³refµ³
gatekeeper„³Resolve„„„„„„³HttpStaticFileServer´³rec´³lit³http-static-files„´³tupleµ´³named³dir´³atom³String„„´³named³pathPrefixElements´³atom³ SignedInteger„„„„„„³ embeddedType´³refµ³ EntityRef„³Cap„„„„„

View File

@ -1,12 +0,0 @@
version 1 .
# Messages and assertions relating to the `$control` entity enabled in syndicate-server when
# the `--control` flag is supplied.
#
# For example, placing the following into `control-config.pr` and starting the server with
# `syndicate-server --control -c control-config.pr` will result in the server exiting with
# exit code 2:
#
# $control ! <exit 2>
ExitServer = <exit @code int> .

View File

@ -1,11 +1,11 @@
version 1 . version 1 .
# Assertion. Describes `object`. ; Assertion. Describes `object`.
Metadata = <metadata @object any @info { symbol: any ...:... }> . Metadata = <metadata @object any @info { symbol: any ...:... }> .
# Projections of the `info` in a `Metadata` record. ; Projections of the `info` in a `Metadata` record.
Description = @present { description: IOList } / @invalid { description: any } / @absent {} . Description = @present { description: IOList } / @invalid { description: any } / @absent {} .
Url = @present { url: string } / @invalid { url: any } / @absent {} . Url = @present { url: string } / @invalid { url: any } / @absent {} .
# Data type. From preserves' `conventions.md`. ; Data type. From preserves' `conventions.md`.
IOList = @bytes bytes / @string string / @nested [IOList ...] . IOList = @bytes bytes / @string string / @nested [IOList ...] .

View File

@ -30,26 +30,26 @@ EnvVariable = @string string / @symbol symbol / @invalid any .
EnvValue = @set string / @remove #f / @invalid any . EnvValue = @set string / @remove #f / @invalid any .
RestartPolicy = RestartPolicy =
/ # Whether the process terminates normally or abnormally, restart it / ; Whether the process terminates normally or abnormally, restart it
# without affecting any peer processes within the service. ; without affecting any peer processes within the service.
=always =always
/ # If the process terminates normally, leave everything alone; if it / ; If the process terminates normally, leave everything alone; if it
# terminates abnormally, restart it without affecting peers. ; terminates abnormally, restart it without affecting peers.
@onError =on-error @onError =on-error
/ # If the process terminates normally, leave everything alone; if it / ; If the process terminates normally, leave everything alone; if it
# terminates abnormally, restart the whole daemon (all processes ; terminates abnormally, restart the whole daemon (all processes
# within the daemon). ; within the daemon).
=all =all
/ # Treat both normal and abnormal termination as normal termination; that is, never restart, / ; Treat both normal and abnormal termination as normal termination; that is, never restart,
# and enter state "complete" even if the process fails. ; and enter state "complete" even if the process fails.
=never =never
. .
Protocol = Protocol =
/ # stdin is /dev/null, output and error are logged / ; stdin is /dev/null, output and error are logged
=none =none
/ # stdin and stdout are *binary* Syndicate-protocol channels / ; stdin and stdout are *binary* Syndicate-protocol channels
@binarySyndicate =application/syndicate @binarySyndicate =application/syndicate
/ # stdin and stdout are *text* Syndicate-protocol channels / ; stdin and stdout are *text* Syndicate-protocol channels
@textSyndicate =text/syndicate @textSyndicate =text/syndicate
. .
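
To see how these fields fit together in practice, a hypothetical daemon definition in a server config file could look like the following (a sketch in the same Preserves text notation as the schemas; not taken from this repository — the id my-tailer and the command line are made up, argv uses the @shell form of CommandLine, and require-service comes from the service schema):

<require-service <daemon my-tailer>>
<daemon my-tailer {
  argv: "tail -F /var/log/syslog",
  protocol: none,
  restart: on-error,
}>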

View File

@ -0,0 +1,7 @@
version 1 .
API = gatekeeper.Resolve / noise.Connect .
NoiseService = <noise @spec noise.NoiseSpec @service #!any> .
NoiseServiceSpec = @base noise.NoiseSpec & @secretKey SecretKeyField .
SecretKeyField = @present { secretKey: bytes } / @invalid { secretKey: any } / @absent {} .

View File

@ -1,18 +1,10 @@
version 1 . version 1 .
embeddedType EntityRef.Cap . embeddedType EntityRef.Cap .
Gatekeeper = <gatekeeper @bindspace #:gatekeeper.Bind> .
DebtReporter = <debt-reporter @intervalSeconds double>. DebtReporter = <debt-reporter @intervalSeconds double>.
TcpRelayListener = TcpWithoutHttp / TcpWithHttp . TcpRelayListener = <relay-listener @addr TransportAddress.Tcp @gatekeeper #!gatekeeper.Resolve> .
TcpWithoutHttp = <relay-listener @addr TransportAddress.Tcp @gatekeeper #:gatekeeper.Resolve> . UnixRelayListener = <relay-listener @addr TransportAddress.Unix @gatekeeper #!gatekeeper.Resolve> .
TcpWithHttp = <relay-listener @addr TransportAddress.Tcp @gatekeeper #:gatekeeper.Resolve @httpd #:http.HttpContext> .
UnixRelayListener = <relay-listener @addr TransportAddress.Unix @gatekeeper #:gatekeeper.Resolve> .
ConfigWatcher = <config-watcher @path string @env ConfigEnv>. ConfigWatcher = <config-watcher @path string @env ConfigEnv>.
ConfigEnv = { symbol: any ...:... }. ConfigEnv = { symbol: any ...:... }.
HttpRouter = <http-router @httpd #:any> .
HttpStaticFileServer = <http-static-files @dir string @pathPrefixElements int> .

View File

@ -0,0 +1,352 @@
use noise_protocol::CipherState;
use noise_protocol::U8Array;
use noise_protocol::patterns::HandshakePattern;
use noise_rust_crypto::Blake2s;
use noise_rust_crypto::ChaCha20Poly1305;
use noise_rust_crypto::X25519;
use preserves_schema::Codec;
use syndicate::relay::Mutex;
use syndicate::relay::TunnelRelay;
use syndicate::trace::TurnCause;
use syndicate::value::NoEmbeddedDomainCodec;
use syndicate::value::packed::PackedWriter;
use std::convert::TryInto;
use std::sync::Arc;
use syndicate::actor::*;
use syndicate::during::DuringResult;
use syndicate::value::NestedValue;
use syndicate::schemas::dataspace;
use syndicate::schemas::gatekeeper;
use syndicate::schemas::noise;
use crate::language::language;
use crate::schemas::gatekeeper_mux::Api;
use crate::schemas::gatekeeper_mux::NoiseServiceSpec;
use crate::schemas::gatekeeper_mux::SecretKeyField;
// pub fn bind(
// t: &mut Activation,
// ds: &Arc<Cap>,
// oid: syndicate::schemas::sturdy::_Any,
// key: [u8; 16],
// target: Arc<Cap>,
// ) {
// let sr = sturdy::SturdyRef::mint(oid.clone(), &key);
// tracing::info!(cap = ?language().unparse(&sr), hex = %sr.to_hex());
// ds.assert(t, language(), &gatekeeper::Bind { oid, key: key.to_vec(), target });
// }
pub fn handle_assertion(
ds: &mut Arc<Cap>,
t: &mut Activation,
a: Api<AnyValue>,
) -> DuringResult<Arc<Cap>> {
match a {
Api::Resolve(resolve_box) => handle_resolve(ds, t, *resolve_box),
Api::Connect(connect_box) => handle_connect(ds, t, *connect_box),
}
}
fn handle_resolve(
ds: &mut Arc<Cap>,
t: &mut Activation,
a: gatekeeper::Resolve,
) -> DuringResult<Arc<Cap>> {
let gatekeeper::Resolve { sturdyref, observer } = a;
let queried_oid = sturdyref.oid.clone();
let handler = syndicate::entity(observer)
.on_asserted(move |observer, t, a: AnyValue| {
let bindings = a.value().to_sequence()?;
let key = bindings[0].value().to_bytestring()?;
let unattenuated_target = bindings[1].value().to_embedded()?;
match sturdyref.validate_and_attenuate(key, unattenuated_target) {
Err(e) => {
tracing::warn!(sturdyref = ?language().unparse(&sturdyref),
"sturdyref failed validation: {}", e);
Ok(None)
},
Ok(target) => {
tracing::trace!(sturdyref = ?language().unparse(&sturdyref),
?target,
"sturdyref resolved");
if let Some(h) = observer.assert(t, &(), &AnyValue::domain(target)) {
Ok(Some(Box::new(move |_observer, t| Ok(t.retract(h)))))
} else {
Ok(None)
}
}
}
})
.create_cap(t);
if let Some(oh) = ds.assert(t, language(), &dataspace::Observe {
// TODO: codegen plugin to generate pattern constructors
pattern: syndicate_macros::pattern!{<bind #(&queried_oid) $ $>},
observer: handler,
}) {
Ok(Some(Box::new(move |_ds, t| Ok(t.retract(oh)))))
} else {
Ok(None)
}
}
fn handle_connect(
ds: &mut Arc<Cap>,
t: &mut Activation,
a: noise::Connect<AnyValue>,
) -> DuringResult<Arc<Cap>> {
let noise::Connect { service_selector, initiator_session } = a;
let handler = syndicate::entity(())
.on_asserted_facet(move |_state, t, a: AnyValue| {
let initiator_session = Arc::clone(&initiator_session);
t.spawn_link(None, move |t| {
let bindings = a.value().to_sequence()?;
let spec: NoiseServiceSpec<AnyValue> = language().parse(&bindings[0])?;
let protocol = match spec.base.protocol {
noise::NoiseProtocol::Present { protocol } =>
protocol,
noise::NoiseProtocol::Invalid { protocol } =>
Err(format!("Invalid noise protocol {:?}", protocol))?,
noise::NoiseProtocol::Absent =>
language().unparse(&noise::DefaultProtocol).value().to_string()?.clone(),
};
let psks = match spec.base.pre_shared_keys {
noise::NoisePreSharedKeys::Present { pre_shared_keys } =>
pre_shared_keys,
noise::NoisePreSharedKeys::Invalid { pre_shared_keys } =>
Err(format!("Invalid pre-shared-keys {:?}", pre_shared_keys))?,
noise::NoisePreSharedKeys::Absent =>
vec![],
};
let secret_key = match spec.secret_key {
SecretKeyField::Present { secret_key } =>
Some(secret_key),
SecretKeyField::Invalid { secret_key } =>
Err(format!("Invalid secret key {:?}", secret_key))?,
SecretKeyField::Absent =>
None,
};
let service = bindings[1].value().to_embedded()?;
run_noise_responder(t,
spec.base.service,
protocol,
psks,
secret_key,
initiator_session,
Arc::clone(service))
});
Ok(())
})
.create_cap(t);
if let Some(oh) = ds.assert(t, language(), &dataspace::Observe {
// TODO: codegen plugin to generate pattern constructors
pattern: syndicate_macros::pattern!{
<noise $spec:NoiseServiceSpec{ { service: #(&service_selector) } } $service >
},
observer: handler,
}) {
Ok(Some(Box::new(move |_ds, t| Ok(t.retract(oh)))))
} else {
Ok(None)
}
}
struct ResponderDetails {
initiator_session: Arc<Cap>,
service: Arc<Cap>,
}
struct ResponderTransport {
relay_input: Arc<Mutex<Option<TunnelRelay>>>,
c_recv: CipherState<ChaCha20Poly1305>
}
enum ResponderState {
Handshake(ResponderDetails, noise_protocol::HandshakeState<X25519, ChaCha20Poly1305, Blake2s>),
Transport(ResponderTransport),
}
impl Entity<noise::Packet> for ResponderState {
fn message(&mut self, t: &mut Activation, p: noise::Packet) -> ActorResult {
match self {
ResponderState::Handshake(details, hs) => match p {
noise::Packet::Complete(bs) => {
if bs.len() < hs.get_next_message_overhead() {
Err("Invalid handshake message for pattern")?;
}
if bs.len() > hs.get_next_message_overhead() {
Err("Cannot accept payload during handshake")?;
}
hs.read_message(&bs, &mut [])?;
let mut reply = vec![0u8; hs.get_next_message_overhead()];
hs.write_message(&[], &mut reply[..])?;
details.initiator_session.message(t, language(), &noise::Packet::Complete(reply.into()));
if hs.completed() {
let (c_recv, mut c_send) = hs.get_ciphers();
let (_, relay_input, mut relay_output) =
TunnelRelay::_run(t, Some(Arc::clone(&details.service)), None, false);
let trace_collector = t.trace_collector();
let transport = ResponderTransport { relay_input, c_recv };
let initiator_session = Arc::clone(&details.initiator_session);
let relay_output_name = Some(AnyValue::symbol("relay_output"));
let transport_facet = t.facet.clone();
t.linked_task(relay_output_name.clone(), async move {
let account = Account::new(relay_output_name, trace_collector);
let cause = TurnCause::external("relay_output");
loop {
match relay_output.recv().await {
None => return Ok(LinkedTaskTermination::KeepFacet),
Some(loaned_item) => {
const MAXSIZE: usize = 65535 - 16; /* Noise tag length is 16 */
let p = if loaned_item.item.len() > MAXSIZE {
noise::Packet::Fragmented(
loaned_item.item
.chunks(MAXSIZE)
.map(|c| c_send.encrypt_vec(c))
.collect())
} else {
noise::Packet::Complete(c_send.encrypt_vec(&loaned_item.item))
};
if !transport_facet.activate(&account, Some(cause.clone()), |t| {
initiator_session.message(t, language(), &p);
Ok(())
}) {
break;
}
}
}
}
Ok(LinkedTaskTermination::Normal)
});
*self = ResponderState::Transport(transport);
}
}
_ => Err("Fragmented handshake is not allowed")?,
},
ResponderState::Transport(transport) => {
let bs = match p {
noise::Packet::Complete(bs) =>
transport.c_recv.decrypt_vec(&bs[..]).map_err(|_| "Cannot decrypt packet")?,
noise::Packet::Fragmented(pieces) => {
let mut result = Vec::with_capacity(1024);
for piece in pieces {
result.extend(transport.c_recv.decrypt_vec(&piece[..])
.map_err(|_| "Cannot decrypt packet fragment")?);
}
result
}
};
let mut g = transport.relay_input.lock();
let tr = g.as_mut().expect("initialized");
tr.handle_inbound_datagram(t, &bs[..])?;
}
}
Ok(())
}
}
fn lookup_pattern(name: &str) -> Option<HandshakePattern> {
use noise_protocol::patterns::*;
Some(match name {
"N" => noise_n(),
"K" => noise_k(),
"X" => noise_x(),
"NN" => noise_nn(),
"NK" => noise_nk(),
"NX" => noise_nx(),
"XN" => noise_xn(),
"XK" => noise_xk(),
"XX" => noise_xx(),
"KN" => noise_kn(),
"KK" => noise_kk(),
"KX" => noise_kx(),
"IN" => noise_in(),
"IK" => noise_ik(),
"IX" => noise_ix(),
"Npsk0" => noise_n_psk0(),
"Kpsk0" => noise_k_psk0(),
"Xpsk1" => noise_x_psk1(),
"NNpsk0" => noise_nn_psk0(),
"NNpsk2" => noise_nn_psk2(),
"NKpsk0" => noise_nk_psk0(),
"NKpsk2" => noise_nk_psk2(),
"NXpsk2" => noise_nx_psk2(),
"XNpsk3" => noise_xn_psk3(),
"XKpsk3" => noise_xk_psk3(),
"XXpsk3" => noise_xx_psk3(),
"KNpsk0" => noise_kn_psk0(),
"KNpsk2" => noise_kn_psk2(),
"KKpsk0" => noise_kk_psk0(),
"KKpsk2" => noise_kk_psk2(),
"KXpsk2" => noise_kx_psk2(),
"INpsk1" => noise_in_psk1(),
"INpsk2" => noise_in_psk2(),
"IKpsk1" => noise_ik_psk1(),
"IKpsk2" => noise_ik_psk2(),
"IXpsk2" => noise_ix_psk2(),
"NNpsk0+psk2" => noise_nn_psk0_psk2(),
"NXpsk0+psk1+psk2" => noise_nx_psk0_psk1_psk2(),
"XNpsk1+psk3" => noise_xn_psk1_psk3(),
"XKpsk0+psk3" => noise_xk_psk0_psk3(),
"KNpsk1+psk2" => noise_kn_psk1_psk2(),
"KKpsk0+psk2" => noise_kk_psk0_psk2(),
"INpsk1+psk2" => noise_in_psk1_psk2(),
"IKpsk0+psk2" => noise_ik_psk0_psk2(),
"IXpsk0+psk2" => noise_ix_psk0_psk2(),
"XXpsk0+psk1" => noise_xx_psk0_psk1(),
"XXpsk0+psk2" => noise_xx_psk0_psk2(),
"XXpsk0+psk3" => noise_xx_psk0_psk3(),
"XXpsk0+psk1+psk2+psk3" => noise_xx_psk0_psk1_psk2_psk3(),
_ => return None,
})
}
fn run_noise_responder(
t: &mut Activation,
service_selector: AnyValue,
protocol: String,
psks: Vec<Vec<u8>>,
secret_key: Option<Vec<u8>>,
initiator_session: Arc<Cap>,
service: Arc<Cap>,
) -> ActorResult {
const PREFIX: &'static str = "Noise_";
const SUFFIX: &'static str = "_25519_ChaChaPoly_BLAKE2s";
if !protocol.starts_with(PREFIX) || !protocol.ends_with(SUFFIX) {
Err(format!("Unsupported protocol {:?}", protocol))?;
}
let pattern_name = &protocol[PREFIX.len()..(protocol.len()-SUFFIX.len())];
let pattern = lookup_pattern(pattern_name).ok_or_else::<ActorError, _>(
|| format!("Unsupported handshake pattern {:?}", pattern_name).into())?;
let hs = {
let mut builder = noise_protocol::HandshakeStateBuilder::new();
builder.set_pattern(pattern);
builder.set_is_initiator(false);
let prologue = PackedWriter::encode(&mut NoEmbeddedDomainCodec, &service_selector)?;
builder.set_prologue(&prologue);
match secret_key {
None => (),
Some(sk) => {
let sk: [u8; 32] = sk.try_into().map_err(|_| "Bad secret key length")?;
builder.set_s(U8Array::from_slice(&sk));
},
}
let mut hs = builder.build_handshake_state();
for psk in psks.into_iter() {
hs.push_psk(&psk);
}
hs
};
let details = ResponderDetails {
initiator_session: initiator_session.clone(),
service,
};
let responder_session =
Cap::guard(crate::Language::arc(), t.create(ResponderState::Handshake(details, hs)));
initiator_session.assert(t, language(), &noise::Accept { responder_session });
Ok(())
}
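
A note on the protocol string handled by run_noise_responder above: only names of the form Noise_<pattern>_25519_ChaChaPoly_BLAKE2s are accepted, and <pattern> must be one of the handshake patterns known to lookup_pattern. The parsing step in isolation, as a self-contained sketch (illustrative; it adds an explicit length guard, whereas the code above relies on the prefix/suffix checks alone):

fn handshake_pattern_name(protocol: &str) -> Option<&str> {
    const PREFIX: &str = "Noise_";
    const SUFFIX: &str = "_25519_ChaChaPoly_BLAKE2s";
    if protocol.len() <= PREFIX.len() + SUFFIX.len()
        || !protocol.starts_with(PREFIX)
        || !protocol.ends_with(SUFFIX)
    {
        return None; // reported as "Unsupported protocol" by run_noise_responder
    }
    Some(&protocol[PREFIX.len()..protocol.len() - SUFFIX.len()])
}

// handshake_pattern_name("Noise_NKpsk0_25519_ChaChaPoly_BLAKE2s") == Some("NKpsk0")
// handshake_pattern_name("Noise_XX_25519_ChaChaPoly_BLAKE2s")     == Some("XX")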

View File

@ -1,195 +0,0 @@
use std::convert::TryInto;
use std::sync::Arc;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use hyper::{Request, Response, Body, StatusCode};
use hyper::body;
use hyper::header::HeaderName;
use hyper::header::HeaderValue;
use syndicate::actor::*;
use syndicate::error::Error;
use syndicate::trace;
use syndicate::value::Map;
use syndicate::value::NestedValue;
use syndicate::schemas::http;
use tokio::sync::oneshot;
use tokio::sync::mpsc::{UnboundedSender, unbounded_channel};
use tokio_stream::wrappers::UnboundedReceiverStream;
use crate::language;
static NEXT_SEQ: AtomicU64 = AtomicU64::new(0);
pub fn empty_response(code: StatusCode) -> Response<Body> {
let mut r = Response::new(Body::empty());
*r.status_mut() = code;
r
}
type ChunkItem = Result<body::Bytes, Box<dyn std::error::Error + Send + Sync>>;
struct ResponseCollector {
tx_res: Option<(oneshot::Sender<Response<Body>>, Response<Body>)>,
body_tx: Option<UnboundedSender<ChunkItem>>,
}
impl ResponseCollector {
fn new(tx: oneshot::Sender<Response<Body>>) -> Self {
let (body_tx, body_rx) = unbounded_channel();
let body_stream: Box<dyn futures::Stream<Item = ChunkItem> + Send> =
Box::new(UnboundedReceiverStream::new(body_rx));
let mut res = Response::new(body_stream.into());
*res.status_mut() = StatusCode::OK;
ResponseCollector {
tx_res: Some((tx, res)),
body_tx: Some(body_tx),
}
}
fn with_res<F: FnOnce(&mut Response<Body>) -> ActorResult>(&mut self, f: F) -> ActorResult {
if let Some((_, res)) = &mut self.tx_res {
f(res)?;
}
Ok(())
}
fn deliver_res(&mut self) {
if let Some((tx, res)) = std::mem::replace(&mut self.tx_res, None) {
let _ = tx.send(res);
}
}
fn add_chunk(&mut self, value: http::Chunk) -> ActorResult {
self.deliver_res();
if let Some(body_tx) = self.body_tx.as_mut() {
body_tx.send(Ok(match value {
http::Chunk::Bytes(bs) => bs.into(),
http::Chunk::String(s) => s.as_bytes().to_vec().into(),
}))?;
}
Ok(())
}
fn finish(&mut self, t: &mut Activation) -> ActorResult {
self.deliver_res();
self.body_tx = None;
t.stop();
Ok(())
}
}
impl Entity<http::HttpResponse> for ResponseCollector {
fn message(&mut self, t: &mut Activation, message: http::HttpResponse) -> ActorResult {
match message {
http::HttpResponse::Status { code, .. } => self.with_res(|r| {
*r.status_mut() = StatusCode::from_u16(
(&code).try_into().map_err(|_| "bad status code")?)?;
Ok(())
}),
http::HttpResponse::Header { name, value } => self.with_res(|r| {
r.headers_mut().insert(HeaderName::from_bytes(name.as_bytes())?,
HeaderValue::from_str(value.as_str())?);
Ok(())
}),
http::HttpResponse::Chunk { chunk } => {
self.add_chunk(*chunk)
}
http::HttpResponse::Done { chunk } => {
self.add_chunk(*chunk)?;
self.finish(t)
}
}
}
}
pub async fn serve(
trace_collector: Option<trace::TraceCollector>,
facet: FacetRef,
httpd: Arc<Cap>,
mut req: Request<Body>,
port: u16,
) -> Result<Response<Body>, Error> {
let host = match req.headers().get("host").and_then(|v| v.to_str().ok()) {
None => http::RequestHost::Absent,
Some(h) => http::RequestHost::Present(match h.rsplit_once(':') {
None => h.to_string(),
Some((h, _port)) => h.to_string(),
})
};
let uri = req.uri();
let mut path: Vec<String> = uri.path().split('/').map(|s| s.to_string()).collect();
path.remove(0);
let mut query: Map<String, Vec<http::QueryValue>> = Map::new();
for piece in uri.query().unwrap_or("").split('&').into_iter() {
match piece.split_once('=') {
Some((k, v)) => {
let key = k.to_string();
let value = v.to_string();
match query.get_mut(&key) {
None => { query.insert(key, vec![http::QueryValue::String(value)]); },
Some(vs) => { vs.push(http::QueryValue::String(value)); },
}
}
None => {
if piece.len() > 0 {
let key = piece.to_string();
if !query.contains_key(&key) {
query.insert(key, vec![]);
}
}
}
}
}
let mut headers: Map<String, String> = Map::new();
for h in req.headers().into_iter() {
match h.1.to_str() {
Ok(v) => { headers.insert(h.0.as_str().to_string().to_lowercase(), v.to_string()); },
Err(_) => return Ok(empty_response(StatusCode::BAD_REQUEST)),
}
}
let body = match body::to_bytes(req.body_mut()).await {
Ok(b) => http::RequestBody::Present(b.to_vec()),
Err(_) => return Ok(empty_response(StatusCode::BAD_REQUEST)),
};
let account = Account::new(Some(AnyValue::symbol("http")), trace_collector);
let (tx, rx) = oneshot::channel();
facet.activate(&account, Some(trace::TurnCause::external("http")), |t| {
t.facet(move |t| {
let sreq = http::HttpRequest {
sequence_number: NEXT_SEQ.fetch_add(1, Ordering::Relaxed).into(),
host,
port: port.into(),
method: req.method().to_string().to_lowercase(),
path,
headers: http::Headers(headers),
query,
body,
};
tracing::debug!(?sreq);
let srep = Cap::guard(&language().syndicate, t.create(ResponseCollector::new(tx)));
httpd.assert(t, language(), &http::HttpContext { req: sreq, res: srep });
Ok(())
})?;
Ok(())
});
let response_result = rx.await;
match response_result {
Ok(response) => Ok(response),
Err(_ /* sender dropped */) => Ok(empty_response(StatusCode::INTERNAL_SERVER_ERROR)),
}
}
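
One detail of serve worth spelling out is how the query string is folded into the query map: repeated keys accumulate their values in order, and a bare key with no '=' is recorded with an empty value list. The same logic extracted into a standalone function (illustrative only; it uses a plain BTreeMap and String values where serve uses syndicate's Map and http::QueryValue, and, like serve, it performs no percent-decoding):

use std::collections::BTreeMap;

fn parse_query(q: &str) -> BTreeMap<String, Vec<String>> {
    let mut query: BTreeMap<String, Vec<String>> = BTreeMap::new();
    for piece in q.split('&') {
        match piece.split_once('=') {
            Some((k, v)) => query.entry(k.to_string()).or_default().push(v.to_string()),
            None => {
                if !piece.is_empty() {
                    // Bare key, e.g. "?flag": present in the map, but with no values.
                    query.entry(piece.to_string()).or_default();
                }
            }
        }
    }
    query
}

// parse_query("a=1&a=2&flag") == {"a": ["1", "2"], "flag": []}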

View File

@ -1,7 +1,5 @@
use preserves_schema::Codec; use preserves_schema::Codec;
use services::gatekeeper;
use std::convert::TryInto;
use std::io; use std::io;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -21,18 +19,13 @@ use syndicate::value::NestedValue;
mod counter; mod counter;
mod dependencies; mod dependencies;
mod http; mod gatekeeper;
mod language; mod language;
mod lifecycle; mod lifecycle;
mod protocol; mod protocol;
mod resolution;
mod script; mod script;
mod services; mod services;
#[cfg(feature = "jemalloc")]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
mod schemas { mod schemas {
include!(concat!(env!("OUT_DIR"), "/src/schemas/mod.rs")); include!(concat!(env!("OUT_DIR"), "/src/schemas/mod.rs"));
} }
@ -63,10 +56,6 @@ struct ServerConfig {
#[structopt(short = "t", long)] #[structopt(short = "t", long)]
trace_file: Option<PathBuf>, trace_file: Option<PathBuf>,
/// Enable `$control` entity.
#[structopt(long)]
control: bool,
} }
#[tokio::main] #[tokio::main]
@ -119,37 +108,21 @@ async fn main() -> ActorResult {
         }));
     }
 
-    let gatekeeper = gatekeeper::create_gatekeeper(t, &server_config_ds)?;
+    let gatekeeper = Cap::guard(Language::arc(), t.create(
+        syndicate::entity(Arc::clone(&server_config_ds))
+            .on_asserted(gatekeeper::handle_assertion)));
 
     let mut env = Map::new();
     env.insert("config".to_owned(), AnyValue::domain(Arc::clone(&server_config_ds)));
     env.insert("log".to_owned(), AnyValue::domain(Arc::clone(&log_ds)));
     env.insert("gatekeeper".to_owned(), AnyValue::domain(Arc::clone(&gatekeeper)));
-    if config.control {
-        env.insert("control".to_owned(), AnyValue::domain(Cap::guard(Language::arc(), t.create(
-            syndicate::entity(())
-                .on_message(|_, _t, m: crate::schemas::control::ExitServer| {
-                    tracing::info!("$control received exit request with code {}", m.code);
-                    std::process::exit((&m.code).try_into().unwrap_or_else(|_| {
-                        tracing::warn!(
-                            "exit code {} out-of-range of 32-bit signed integer, using 1 instead",
-                            m.code);
-                        1
-                    }))
-                })))));
-    }
 
     dependencies::boot(t, Arc::clone(&server_config_ds));
     services::config_watcher::on_demand(t, Arc::clone(&server_config_ds));
     services::daemon::on_demand(t, Arc::clone(&server_config_ds), Arc::clone(&log_ds));
     services::debt_reporter::on_demand(t, Arc::clone(&server_config_ds));
-    services::gatekeeper::on_demand(t, Arc::clone(&server_config_ds));
-    services::http_router::on_demand(t, Arc::clone(&server_config_ds));
     services::tcp_relay_listener::on_demand(t, Arc::clone(&server_config_ds));
     services::unix_relay_listener::on_demand(t, Arc::clone(&server_config_ds));
-    resolution::client::start(t, Arc::clone(&server_config_ds));
-    resolution::transports::on_demand(t, Arc::clone(&server_config_ds));
 
     if config.debt_reporter {
         server_config_ds.assert(t, language(), &service::RunService {
@ -161,7 +134,7 @@ async fn main() -> ActorResult {
 
     for port in config.ports.clone() {
         server_config_ds.assert(t, language(), &service::RunService {
-            service_name: language().unparse(&internal_services::TcpWithoutHttp {
+            service_name: language().unparse(&internal_services::TcpRelayListener {
                 addr: transport_address::Tcp {
                     host: "0.0.0.0".to_owned(),
                     port: (port as i32).into(),

View File

@ -1,15 +1,11 @@
 use futures::SinkExt;
 use futures::StreamExt;
-use hyper::header::HeaderValue;
-use hyper::service::service_fn;
 use std::future::ready;
+use std::io;
 use std::sync::Arc;
-use std::sync::atomic::AtomicBool;
-use std::sync::atomic::Ordering;
 use syndicate::actor::*;
-use syndicate::enclose;
 use syndicate::error::Error;
 use syndicate::error::error;
 use syndicate::relay;
@ -18,7 +14,7 @@ use syndicate::value::NestedValue;
 use tokio::net::TcpStream;
 
-use hyper_tungstenite::tungstenite::Message;
+use tungstenite::Message;
 
 struct ExitListener;
 
@ -35,7 +31,7 @@ pub fn run_io_relay(
     initial_ref: Arc<Cap>,
 ) -> ActorResult {
     let exit_listener = t.create(ExitListener);
-    t.add_exit_hook(&exit_listener);
+    t.state.add_exit_hook(&exit_listener);
     relay::TunnelRelay::run(t, i, o, Some(initial_ref), None, false);
     Ok(())
 }
@ -57,75 +53,34 @@ pub async fn detect_protocol(
     facet: FacetRef,
     stream: TcpStream,
     gateway: Arc<Cap>,
-    httpd: Option<Arc<Cap>>,
     addr: std::net::SocketAddr,
-    server_port: u16,
 ) -> ActorResult {
-    let mut buf = [0; 1]; // peek at the first byte to see what kind of connection to expect
-    match stream.peek(&mut buf).await? {
-        1 => match buf[0] {
-            v if v == b'[' /* Turn */ || v == b'<' /* Error and Extension */ || v >= 128 => {
-                tracing::info!(protocol = %(if v >= 128 { "application/syndicate" } else { "text/syndicate" }), peer = ?addr);
-                let (i, o) = stream.into_split();
-                let i = relay::Input::Bytes(Box::pin(i));
-                let o = relay::Output::Bytes(Box::pin(o /* BufWriter::new(o) */));
-                run_connection(trace_collector, facet, i, o, gateway);
-                Ok(())
-            }
-            _ => {
-                let upgraded = Arc::new(AtomicBool::new(false));
-                let keepalive = facet.actor.keep_alive();
-                let mut http = hyper::server::conn::Http::new();
-                http.http1_keep_alive(true);
-                http.http1_only(true);
-                let service = service_fn(|mut req| enclose!(
-                    (upgraded, keepalive, trace_collector, facet, gateway, httpd) async move {
-                        if hyper_tungstenite::is_upgrade_request(&req) {
-                            tracing::info!(protocol = %"websocket",
-                                           method=%req.method(),
-                                           uri=?req.uri(),
-                                           host=?req.headers().get("host").unwrap_or(&HeaderValue::from_static("")));
-                            let (response, websocket) = hyper_tungstenite::upgrade(&mut req, None)
-                                .map_err(|e| message_error(e))?;
-                            upgraded.store(true, Ordering::SeqCst);
-                            tokio::spawn(enclose!(() async move {
-                                let (o, i) = websocket.await?.split();
-                                let i = i.filter_map(|r| ready(extract_binary_packets(r).transpose()));
-                                let o = o.sink_map_err(message_error).with(|bs| ready(Ok(Message::Binary(bs))));
-                                let i = relay::Input::Packets(Box::pin(i));
-                                let o = relay::Output::Packets(Box::pin(o));
-                                run_connection(trace_collector, facet, i, o, gateway);
-                                drop(keepalive);
-                                Ok(()) as ActorResult
-                            }));
-                            Ok(response)
-                        } else {
-                            match httpd {
-                                None => Ok(crate::http::empty_response(
-                                    hyper::StatusCode::SERVICE_UNAVAILABLE)),
-                                Some(httpd) => {
-                                    tracing::info!(protocol = %"http",
-                                                   method=%req.method(),
-                                                   uri=?req.uri(),
-                                                   host=?req.headers().get("host").unwrap_or(&HeaderValue::from_static("")));
-                                    crate::http::serve(trace_collector, facet, httpd, req, server_port).await
-                                }
-                            }
-                        }
-                    }));
-                http.serve_connection(stream, service).with_upgrades().await?;
-                if upgraded.load(Ordering::SeqCst) {
-                    tracing::debug!("serve_connection completed after upgrade to websocket");
-                } else {
-                    tracing::debug!("serve_connection completed after regular HTTP session");
-                    facet.activate(&Account::new(None, None), None, |t| Ok(t.stop()));
-                }
-                Ok(())
-            },
-        }
-        0 => Err(error("closed before starting", AnyValue::new(false)))?,
-        _ => unreachable!()
-    }
+    let (i, o) = {
+        let mut buf = [0; 1]; // peek at the first byte to see what kind of connection to expect
+        match stream.peek(&mut buf).await? {
+            1 => match buf[0] {
+                b'G' /* ASCII 'G' for "GET" */ => {
+                    tracing::info!(protocol = %"websocket", peer = ?addr);
+                    let s = tokio_tungstenite::accept_async(stream).await
+                        .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+                    let (o, i) = s.split();
+                    let i = i.filter_map(|r| ready(extract_binary_packets(r).transpose()));
+                    let o = o.sink_map_err(message_error).with(|bs| ready(Ok(Message::Binary(bs))));
+                    (relay::Input::Packets(Box::pin(i)), relay::Output::Packets(Box::pin(o)))
+                },
+                _ => {
+                    tracing::info!(protocol = %"raw", peer = ?addr);
+                    let (i, o) = stream.into_split();
+                    (relay::Input::Bytes(Box::pin(i)),
+                     relay::Output::Bytes(Box::pin(o /* BufWriter::new(o) */)))
+                }
+            }
+            0 => Err(error("closed before starting", AnyValue::new(false)))?,
+            _ => unreachable!()
+        }
+    };
+    run_connection(trace_collector, facet, i, o, gateway);
+    Ok(())
 }
 
 fn message_error<E: std::fmt::Display>(e: E) -> Error {
@ -133,7 +88,7 @@ fn message_error<E: std::fmt::Display>(e: E) -> Error {
 }
 
 fn extract_binary_packets(
-    r: Result<Message, hyper_tungstenite::tungstenite::Error>,
+    r: Result<Message, tungstenite::Error>,
 ) -> Result<Option<Vec<u8>>, Error> {
     match r {
         Ok(m) => match m {
@ -147,8 +102,6 @@ fn extract_binary_packets(
                 Ok(None), // unsolicited pongs are to be ignored
             Message::Close(_) =>
                 Ok(None), // we're about to see the end of the stream, so ignore this
-            Message::Frame(_) =>
-                Err("Raw frames are not accepted")?,
         },
         Err(e) => Err(message_error(e)),
     }
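
Both versions of `detect_protocol` hinge on sniffing the first byte of the stream: this older branch treats a leading `G` (the start of an HTTP `GET`, hence a websocket upgrade) as the websocket case and everything else as a raw relay connection, while the newer code on `main` treats `[`, `<`, or any byte >= 128 as native syndicate framing and hands the rest to its embedded HTTP server. A minimal classifier in the spirit of the `main`-side rules; the enum and function names are illustrative only:

#[derive(Debug, PartialEq)]
enum Sniffed {
    NativeText,    // '[' (Turn) or '<' (Error / Extension): textual syndicate protocol
    NativeBinary,  // first byte >= 128: packed/binary syndicate protocol
    Http,          // anything else, e.g. 'G' from "GET": HTTP or websocket handling
}

fn sniff(first_byte: u8) -> Sniffed {
    match first_byte {
        b'[' | b'<' => Sniffed::NativeText,
        b if b >= 128 => Sniffed::NativeBinary,
        _ => Sniffed::Http,
    }
}

fn main() {
    assert_eq!(sniff(b'['), Sniffed::NativeText);
    assert_eq!(sniff(0xB4), Sniffed::NativeBinary);
    assert_eq!(sniff(b'G'), Sniffed::Http); // "GET / HTTP/1.1" or a websocket upgrade
}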

View File

@ -1,139 +0,0 @@
use preserves_schema::Codec;
use syndicate::dataspace::Dataspace;
use syndicate::preserves::value::Set;
use std::sync::Arc;
use syndicate::actor::*;
use syndicate::rpc;
use syndicate::supervise::{Supervisor, SupervisorConfiguration};
use syndicate::value::NestedValue;
use syndicate::schemas::gatekeeper as G;
use syndicate::schemas::rpc as R;
use crate::language;
use syndicate::enclose;
use syndicate::preserves::rec;
use syndicate_macros::during;
pub fn start(t: &mut Activation, ds: Arc<Cap>) {
t.spawn(Some(AnyValue::symbol("path_resolver")), enclose!((ds) move |t| {
during!(t, ds, language(), <q <resolve-path $route0>>, |t| {
if let Ok(route) = language().parse::<G::Route>(&route0) {
Supervisor::start(
t,
Some(rec![AnyValue::symbol("path_resolver"), language().unparse(&route)]),
SupervisorConfiguration::default(),
|_t, _s| Ok(()),
enclose!((ds) move |t| enclose!((ds, route) run(t, ds, route))))
} else {
tracing::debug!(?route0, "Ignoring bogus route");
Ok(())
}
});
Ok(())
}));
t.spawn(Some(AnyValue::symbol("sturdy_ref_step")),
enclose!((ds) move |t| super::sturdy::handle_sturdy_path_steps(t, ds)));
t.spawn(Some(AnyValue::symbol("noise_ref_step")),
enclose!((ds) move |t| super::noise::handle_noise_path_steps(t, ds)));
}
fn run(t: &mut Activation, ds: Arc<Cap>, route: G::Route) -> ActorResult {
let candidates = t.named_field("candidates", Set::new());
for addr in &route.transports {
ds.assert(t, language(), &rpc::question(language(), G::ConnectTransport { addr: addr.clone() }));
enclose!((candidates) during!(
t, ds, language(),
<a <connect-transport #(addr)> <ok $c: G::ConnectedTransport::<AnyValue>>>,
|t: &mut Activation| {
t.get_mut(&candidates).insert(c.clone());
t.on_stop(enclose!((candidates, c) move |t: &mut Activation| {
t.get_mut(&candidates).remove(&c);
Ok(())
}));
Ok(())
}));
}
let best = t.named_field("best", None);
let root_peer = t.named_field("rootPeer", None);
t.dataflow(enclose!((best, root_peer) move |t| {
let c = t.get(&candidates).first().cloned();
t.set(&root_peer, c.as_ref().map(
|G::ConnectedTransport { responder_session, .. }| responder_session.clone()));
t.set(&best, c);
Ok(())
}))?;
let steps_ref = t.create(Dataspace::new(None));
let steps_ds = Cap::new(&steps_ref);
let mut handle_zero = None;
t.dataflow(enclose!((root_peer) move |t| {
let p = t.get(&root_peer).as_ref().cloned();
t.update(&mut handle_zero, &steps_ref, p.map(|p| AnyValue::new(
vec![AnyValue::new(0), AnyValue::domain(p)])));
Ok(())
}))?;
for (i, step) in route.path_steps.clone().into_iter().enumerate() {
enclose!((ds, steps_ds) during!(
t, steps_ds, language(),
[#(&AnyValue::new(i)), $origin: G::ResolvedPathStep::<AnyValue>],
enclose!((ds, step, steps_ds) move |t: &mut Activation| {
let q = G::ResolvePathStep { origin: origin.0, path_step: step };
ds.assert(t, language(), &rpc::question(language(), q.clone()));
let q2 = q.clone();
during!(
t, ds, language(),
<a #(&language().unparse(&q2)) $a>,
enclose!((q) |t| {
if let Ok(a) = language().parse::<R::Result>(&a) {
match a {
R::Result::Error { .. } => {
ds.assert(t, language(), &rpc::answer(language(), q, a));
}
R::Result::Ok { value } => {
if let Some(next) = value.value().as_embedded() {
steps_ds.assert(t, language(), &AnyValue::new(
vec![AnyValue::new(i + 1),
AnyValue::domain(next.clone())]));
} else {
ds.assert(t, language(), &rpc::answer(
language(), q, R::Result::Error {
error: AnyValue::symbol("invalid-path-step-result"),
}));
}
}
}
}
Ok(())
}));
Ok(())
})));
}
let i = route.path_steps.len();
during!(t, steps_ds, language(),
[#(&AnyValue::new(i)), $r: G::ResolvedPathStep::<AnyValue>],
enclose!((best, ds, route) move |t: &mut Activation| {
let G::ConnectedTransport { addr, control, .. } =
t.get(&best).as_ref().unwrap().clone();
let responder_session = r.0;
ds.assert(t, language(), &rpc::answer(
language(),
G::ResolvePath { route },
R::Result::Ok { value: language().unparse(
&G::ResolvedPath { addr, control, responder_session }) }));
Ok(())
}));
Ok(())
}

View File

@ -1,37 +0,0 @@
use std::sync::Arc;
use syndicate::actor::*;
use syndicate::schemas::gatekeeper;
use syndicate::enclose;
use crate::language;
pub mod client;
pub mod noise;
pub mod sturdy;
pub mod transports;
fn handle_direct_resolution(
ds: &mut Arc<Cap>,
t: &mut Activation,
a: gatekeeper::Resolve,
) -> Result<FacetId, ActorError> {
let outer_facet = t.facet_id();
t.facet(move |t| {
let handler = syndicate::entity(a.observer)
.on_asserted(move |observer, t, a: AnyValue| {
t.stop_facet_and_continue(outer_facet, Some(
enclose!((observer, a) move |t: &mut Activation| {
observer.assert(t, language(), &a);
Ok(())
})))?;
Ok(None)
})
.create_cap(t);
ds.assert(t, language(), &gatekeeper::Resolve {
step: a.step.clone(),
observer: handler,
});
Ok(())
})
}

View File

@ -1,561 +0,0 @@
use noise_protocol::CipherState;
use noise_protocol::U8Array;
use noise_protocol::patterns::HandshakePattern;
use noise_rust_crypto::Blake2s;
use noise_rust_crypto::ChaCha20Poly1305;
use noise_rust_crypto::X25519;
use std::convert::TryInto;
use std::sync::Arc;
use preserves_schema::Codec;
use syndicate::actor::*;
use syndicate::relay::Mutex;
use syndicate::relay::TunnelRelay;
use syndicate::rpc;
use syndicate::trace::TurnCause;
use syndicate::value::NestedValue;
use syndicate::value::NoEmbeddedDomainCodec;
use syndicate::value::PackedWriter;
use syndicate::enclose;
use syndicate_macros::during;
use syndicate_macros::pattern;
use syndicate::schemas::dataspace;
use syndicate::schemas::gatekeeper;
use syndicate::schemas::noise;
use syndicate::schemas::rpc as R;
use syndicate::schemas::sturdy;
use crate::language;
fn noise_step_type() -> String {
language().unparse(&noise::NoiseStepType).value().to_symbol().unwrap().clone()
}
pub fn handle_noise_binds(t: &mut Activation, ds: &Arc<Cap>) -> ActorResult {
during!(t, ds, language(), <bind <noise $desc> $target $observer>, |t: &mut Activation| {
t.spawn_link(None, move |t| {
target.value().to_embedded()?;
let observer = language().parse::<gatekeeper::BindObserver>(&observer)?;
let spec = language().parse::<noise::NoiseDescriptionDetail<AnyValue>>(&desc)?.0;
match validate_noise_service_spec(spec) {
Ok(spec) => if let gatekeeper::BindObserver::Present(o) = observer {
o.assert(t, language(), &gatekeeper::Bound::Bound {
path_step: Box::new(gatekeeper::PathStep {
step_type: noise_step_type(),
detail: language().unparse(&noise::NoisePathStepDetail(noise::NoiseSpec {
key: spec.public_key,
service: noise::ServiceSelector(spec.service),
protocol: if spec.protocol == default_noise_protocol() {
noise::NoiseProtocol::Absent
} else {
noise::NoiseProtocol::Present {
protocol: spec.protocol,
}
},
pre_shared_keys: if spec.psks.is_empty() {
noise::NoisePreSharedKeys::Absent
} else {
noise::NoisePreSharedKeys::Present {
pre_shared_keys: spec.psks,
}
},
})),
}),
});
},
Err(e) => {
if let gatekeeper::BindObserver::Present(o) = observer {
o.assert(t, language(), &gatekeeper::Bound::Rejected(
Box::new(gatekeeper::Rejected {
detail: AnyValue::new(format!("{}", &e)),
})));
}
tracing::error!("Invalid noise bind description: {}", e);
}
}
Ok(())
});
Ok(())
});
Ok(())
}
pub fn take_noise_step(t: &mut Activation, ds: &mut Arc<Cap>, a: &gatekeeper::Resolve, detail: &mut &'static str) -> Result<bool, ActorError> {
if a.step.step_type == noise_step_type() {
*detail = "invalid";
if let Ok(s) = language().parse::<noise::NoiseStepDetail<AnyValue>>(&a.step.detail) {
t.facet(|t| {
let f = super::handle_direct_resolution(ds, t, a.clone())?;
await_bind_noise(ds, t, s.0.0, a.observer.clone(), f)
})?;
return Ok(true);
}
}
Ok(false)
}
struct ValidatedNoiseSpec {
service: AnyValue,
protocol: String,
pattern: HandshakePattern,
psks: Vec<Vec<u8>>,
secret_key: Option<Vec<u8>>,
public_key: Vec<u8>,
}
fn default_noise_protocol() -> String {
language().unparse(&noise::DefaultProtocol).value().to_string().unwrap().clone()
}
fn validate_noise_spec(
spec: noise::NoiseSpec<AnyValue>,
) -> Result<ValidatedNoiseSpec, ActorError> {
let protocol = match spec.protocol {
noise::NoiseProtocol::Present { protocol } => protocol,
noise::NoiseProtocol::Invalid { protocol } =>
Err(format!("Invalid noise protocol {:?}", protocol))?,
noise::NoiseProtocol::Absent => default_noise_protocol(),
};
const PREFIX: &'static str = "Noise_";
const SUFFIX: &'static str = "_25519_ChaChaPoly_BLAKE2s";
if !protocol.starts_with(PREFIX) || !protocol.ends_with(SUFFIX) {
Err(format!("Unsupported protocol {:?}", protocol))?;
}
let pattern_name = &protocol[PREFIX.len()..(protocol.len()-SUFFIX.len())];
let pattern = lookup_pattern(pattern_name).ok_or_else::<ActorError, _>(
|| format!("Unsupported handshake pattern {:?}", pattern_name).into())?;
let psks = match spec.pre_shared_keys {
noise::NoisePreSharedKeys::Present { pre_shared_keys } => pre_shared_keys,
noise::NoisePreSharedKeys::Invalid { pre_shared_keys } =>
Err(format!("Invalid pre-shared-keys {:?}", pre_shared_keys))?,
noise::NoisePreSharedKeys::Absent => vec![],
};
Ok(ValidatedNoiseSpec {
service: spec.service.0,
protocol,
pattern,
psks,
secret_key: None,
public_key: spec.key,
})
}
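
`validate_noise_spec` only accepts protocol names of the form `Noise_<pattern>_25519_ChaChaPoly_BLAKE2s` and slices the handshake-pattern name out of the middle before calling `lookup_pattern`. The slicing on its own, as a runnable sketch with no noise crates involved:

const PREFIX: &str = "Noise_";
const SUFFIX: &str = "_25519_ChaChaPoly_BLAKE2s";

// Returns the handshake-pattern name, e.g. "XK" or "NNpsk0+psk2", or None when the
// protocol string does not use the fixed DH/cipher/hash suite expected here.
fn pattern_name(protocol: &str) -> Option<&str> {
    if protocol.starts_with(PREFIX) && protocol.ends_with(SUFFIX) {
        Some(&protocol[PREFIX.len()..protocol.len() - SUFFIX.len()])
    } else {
        None
    }
}

fn main() {
    assert_eq!(pattern_name("Noise_XK_25519_ChaChaPoly_BLAKE2s"), Some("XK"));
    assert_eq!(pattern_name("Noise_NNpsk0+psk2_25519_ChaChaPoly_BLAKE2s"), Some("NNpsk0+psk2"));
    assert_eq!(pattern_name("Noise_XX_25519_AESGCM_SHA256"), None);
}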
fn validate_noise_service_spec(
spec: noise::NoiseServiceSpec<AnyValue>,
) -> Result<ValidatedNoiseSpec, ActorError> {
let noise::NoiseServiceSpec { base, secret_key } = spec;
let v = validate_noise_spec(base)?;
let secret_key = match secret_key {
noise::SecretKeyField::Present { secret_key } => Some(secret_key),
noise::SecretKeyField::Invalid { secret_key } =>
Err(format!("Invalid secret key {:?}", secret_key))?,
noise::SecretKeyField::Absent => None,
};
Ok(ValidatedNoiseSpec { secret_key, .. v })
}
fn await_bind_noise(
ds: &mut Arc<Cap>,
t: &mut Activation,
service_selector: AnyValue,
observer: Arc<Cap>,
direct_resolution_facet: FacetId,
) -> ActorResult {
let handler = syndicate::entity(())
.on_asserted_facet(move |_state, t, a: AnyValue| {
t.stop_facet(direct_resolution_facet);
let observer = Arc::clone(&observer);
t.spawn_link(None, move |t| {
let bindings = a.value().to_sequence()?;
let spec = validate_noise_service_spec(language().parse(&bindings[0])?)?;
let service = bindings[1].value().to_embedded()?.clone();
let hs = make_handshake(&spec, false)?;
let responder_session = Cap::guard(crate::Language::arc(), t.create(
ResponderState::Introduction{ service, hs }));
observer.assert(
t, language(), &gatekeeper::Resolved::Accepted { responder_session });
Ok(())
});
Ok(())
})
.create_cap(t);
ds.assert(t, language(), &dataspace::Observe {
// TODO: codegen plugin to generate pattern constructors
pattern: pattern!{
<bind <noise $spec:NoiseServiceSpec{ { service: #(&service_selector) } }> $service _>
},
observer: handler,
});
Ok(())
}
type NoiseHandshakeState = noise_protocol::HandshakeState<X25519, ChaCha20Poly1305, Blake2s>;
struct HandshakeState {
peer: Arc<Cap>,
hs: NoiseHandshakeState,
initial_ref: Option<Arc<Cap>>,
initial_oid: Option<sturdy::Oid>,
}
struct TransportState {
relay_input: Arc<Mutex<Option<TunnelRelay>>>,
c_recv: CipherState<ChaCha20Poly1305>,
}
enum ResponderState {
Invalid, // used during state transitions
Introduction {
service: Arc<Cap>,
hs: NoiseHandshakeState,
},
Handshake(HandshakeState),
Transport(TransportState),
}
impl Entity<noise::SessionItem> for ResponderState {
fn assert(&mut self, _t: &mut Activation, item: noise::SessionItem, _handle: Handle) -> ActorResult {
let initiator_session = match item {
noise::SessionItem::Initiator(i_box) => i_box.initiator_session,
noise::SessionItem::Packet(_) => Err("Unexpected Packet assertion")?,
};
match std::mem::replace(self, ResponderState::Invalid) {
ResponderState::Introduction { service, hs } => {
*self = ResponderState::Handshake(HandshakeState {
peer: initiator_session,
hs,
initial_ref: Some(service.clone()),
initial_oid: None,
});
Ok(())
}
_ =>
Err("Received second Initiator")?,
}
}
fn message(&mut self, t: &mut Activation, item: noise::SessionItem) -> ActorResult {
let p = match item {
noise::SessionItem::Initiator(_) => Err("Unexpected Initiator message")?,
noise::SessionItem::Packet(p_box) => *p_box,
};
match self {
ResponderState::Invalid | ResponderState::Introduction { .. } =>
Err("Received Packet in invalid ResponderState")?,
ResponderState::Handshake(hss) => {
if let Some((None, ts)) = hss.handle_packet(t, p)? {
*self = ResponderState::Transport(ts);
}
}
ResponderState::Transport(ts) => ts.handle_packet(t, p)?,
}
Ok(())
}
}
impl HandshakeState {
fn handle_packet(
&mut self,
t: &mut Activation,
p: noise::Packet,
) -> Result<Option<(Option<Arc<Cap>>, TransportState)>, ActorError> {
match p {
noise::Packet::Complete(bs) => {
if bs.len() < self.hs.get_next_message_overhead() {
Err("Invalid handshake message for pattern")?;
}
if bs.len() > self.hs.get_next_message_overhead() {
Err("Cannot accept payload during handshake")?;
}
self.hs.read_message(&bs, &mut [])?;
if self.hs.completed() {
self.complete_handshake(t)
} else {
self.send_handshake_packet(t)
}
}
_ => Err("Fragmented handshake is not allowed")?,
}
}
fn send_handshake_packet(
&mut self,
t: &mut Activation,
) -> Result<Option<(Option<Arc<Cap>>, TransportState)>, ActorError> {
let mut reply = vec![0u8; self.hs.get_next_message_overhead()];
self.hs.write_message(&[], &mut reply[..])?;
self.peer.message(t, language(), &noise::Packet::Complete(reply.into()));
if self.hs.completed() {
self.complete_handshake(t)
} else {
Ok(None)
}
}
fn complete_handshake(
&mut self,
t: &mut Activation,
) -> Result<Option<(Option<Arc<Cap>>, TransportState)>, ActorError> {
let (c_i_to_r, c_r_to_i) = self.hs.get_ciphers();
let (c_recv, mut c_send) = if self.hs.get_is_initiator() {
(c_r_to_i, c_i_to_r)
} else {
(c_i_to_r, c_r_to_i)
};
let (peer_service, relay_input, mut relay_output) =
TunnelRelay::_run(t, self.initial_ref.clone(), self.initial_oid.clone(), false);
let trace_collector = t.trace_collector();
let peer = self.peer.clone();
let relay_output_name = Some(AnyValue::symbol("relay_output"));
let transport_facet = t.facet_ref();
t.linked_task(relay_output_name.clone(), async move {
let account = Account::new(relay_output_name, trace_collector);
let cause = TurnCause::external("relay_output");
loop {
match relay_output.recv().await {
None => return Ok(LinkedTaskTermination::KeepFacet),
Some(loaned_item) => {
const MAXSIZE: usize = 65535 - 16; /* Noise tag length is 16 */
let p = if loaned_item.item.len() > MAXSIZE {
noise::Packet::Fragmented(
loaned_item.item
.chunks(MAXSIZE)
.map(|c| c_send.encrypt_vec(c))
.collect())
} else {
noise::Packet::Complete(c_send.encrypt_vec(&loaned_item.item))
};
if !transport_facet.activate(&account, Some(cause.clone()), |t| {
peer.message(t, language(), &p);
Ok(())
}) {
break;
}
}
}
}
Ok(LinkedTaskTermination::Normal)
});
Ok(Some((peer_service, TransportState { relay_input, c_recv })))
}
}
impl TransportState {
fn handle_packet(
&mut self,
t: &mut Activation,
p: noise::Packet,
) -> ActorResult {
let bs = match p {
noise::Packet::Complete(bs) =>
self.c_recv.decrypt_vec(&bs[..]).map_err(|_| "Cannot decrypt packet")?,
noise::Packet::Fragmented(pieces) => {
let mut result = Vec::with_capacity(1024);
for piece in pieces {
result.extend(self.c_recv.decrypt_vec(&piece[..])
.map_err(|_| "Cannot decrypt packet fragment")?);
}
result
}
};
let mut g = self.relay_input.lock();
let tr = g.as_mut().expect("initialized");
tr.handle_inbound_datagram(t, &bs[..])?;
Ok(())
}
}
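
The `relay_output` task in `complete_handshake` splits any datagram larger than 65519 bytes (65535 minus the 16-byte AEAD tag) across several encrypted fragments so that each ciphertext fits in one Noise transport message, and `TransportState::handle_packet` concatenates the decrypted pieces in order. A sketch of just that chunk-and-reassemble arithmetic, with identity functions standing in for `encrypt_vec`/`decrypt_vec`:

const MAXSIZE: usize = 65535 - 16; // Noise message limit minus the AEAD tag

// Stand-ins for c_send.encrypt_vec / c_recv.decrypt_vec: here they just copy bytes.
fn encrypt(plain: &[u8]) -> Vec<u8> { plain.to_vec() }
fn decrypt(cipher: &[u8]) -> Vec<u8> { cipher.to_vec() }

enum Packet { Complete(Vec<u8>), Fragmented(Vec<Vec<u8>>) }

fn to_packet(datagram: &[u8]) -> Packet {
    if datagram.len() > MAXSIZE {
        Packet::Fragmented(datagram.chunks(MAXSIZE).map(encrypt).collect())
    } else {
        Packet::Complete(encrypt(datagram))
    }
}

fn from_packet(p: &Packet) -> Vec<u8> {
    match p {
        Packet::Complete(bs) => decrypt(bs),
        Packet::Fragmented(pieces) => {
            let mut out = Vec::new();
            for piece in pieces { out.extend(decrypt(piece)); }
            out
        }
    }
}

fn main() {
    let datagram = vec![7u8; 100_000];          // larger than one Noise message
    let packet = to_packet(&datagram);
    assert!(matches!(&packet, Packet::Fragmented(ps) if ps.len() == 2)); // 65519 + 34481 bytes
    assert_eq!(from_packet(&packet), datagram); // reassembly round-trips
}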
fn lookup_pattern(name: &str) -> Option<HandshakePattern> {
use noise_protocol::patterns::*;
Some(match name {
"N" => noise_n(),
"K" => noise_k(),
"X" => noise_x(),
"NN" => noise_nn(),
"NK" => noise_nk(),
"NX" => noise_nx(),
"XN" => noise_xn(),
"XK" => noise_xk(),
"XX" => noise_xx(),
"KN" => noise_kn(),
"KK" => noise_kk(),
"KX" => noise_kx(),
"IN" => noise_in(),
"IK" => noise_ik(),
"IX" => noise_ix(),
"Npsk0" => noise_n_psk0(),
"Kpsk0" => noise_k_psk0(),
"Xpsk1" => noise_x_psk1(),
"NNpsk0" => noise_nn_psk0(),
"NNpsk2" => noise_nn_psk2(),
"NKpsk0" => noise_nk_psk0(),
"NKpsk2" => noise_nk_psk2(),
"NXpsk2" => noise_nx_psk2(),
"XNpsk3" => noise_xn_psk3(),
"XKpsk3" => noise_xk_psk3(),
"XXpsk3" => noise_xx_psk3(),
"KNpsk0" => noise_kn_psk0(),
"KNpsk2" => noise_kn_psk2(),
"KKpsk0" => noise_kk_psk0(),
"KKpsk2" => noise_kk_psk2(),
"KXpsk2" => noise_kx_psk2(),
"INpsk1" => noise_in_psk1(),
"INpsk2" => noise_in_psk2(),
"IKpsk1" => noise_ik_psk1(),
"IKpsk2" => noise_ik_psk2(),
"IXpsk2" => noise_ix_psk2(),
"NNpsk0+psk2" => noise_nn_psk0_psk2(),
"NXpsk0+psk1+psk2" => noise_nx_psk0_psk1_psk2(),
"XNpsk1+psk3" => noise_xn_psk1_psk3(),
"XKpsk0+psk3" => noise_xk_psk0_psk3(),
"KNpsk1+psk2" => noise_kn_psk1_psk2(),
"KKpsk0+psk2" => noise_kk_psk0_psk2(),
"INpsk1+psk2" => noise_in_psk1_psk2(),
"IKpsk0+psk2" => noise_ik_psk0_psk2(),
"IXpsk0+psk2" => noise_ix_psk0_psk2(),
"XXpsk0+psk1" => noise_xx_psk0_psk1(),
"XXpsk0+psk2" => noise_xx_psk0_psk2(),
"XXpsk0+psk3" => noise_xx_psk0_psk3(),
"XXpsk0+psk1+psk2+psk3" => noise_xx_psk0_psk1_psk2_psk3(),
_ => return None,
})
}
fn make_handshake(
spec: &ValidatedNoiseSpec,
is_initiator: bool,
) -> Result<NoiseHandshakeState, ActorError> {
let mut builder = noise_protocol::HandshakeStateBuilder::new();
builder.set_pattern(spec.pattern.clone());
builder.set_is_initiator(is_initiator);
let prologue = PackedWriter::encode(&mut NoEmbeddedDomainCodec, &spec.service)?;
builder.set_prologue(&prologue);
match spec.secret_key.clone() {
None => (),
Some(sk) => {
let sk: [u8; 32] = sk.try_into().map_err(|_| "Bad secret key length")?;
builder.set_s(U8Array::from_slice(&sk));
},
}
builder.set_rs(U8Array::from_slice(&spec.public_key));
let mut hs = builder.build_handshake_state();
for psk in spec.psks.iter() {
hs.push_psk(psk);
}
Ok(hs)
}
pub fn handle_noise_path_steps(t: &mut Activation, ds: Arc<Cap>) -> ActorResult {
during!(t, ds, language(),
<q <resolve-path-step $origin <noise $spec0>>>,
enclose!((ds) move |t: &mut Activation| {
if let Ok(spec) = language().parse::<noise::NoiseSpec>(&spec0) {
if let Some(origin) = origin.value().as_embedded().cloned() {
t.spawn_link(None, move |t| run_noise_initiator(t, ds, origin, spec));
}
}
Ok(())
}));
Ok(())
}
fn run_noise_initiator(
t: &mut Activation,
ds: Arc<Cap>,
origin: Arc<Cap>,
spec: noise::NoiseSpec,
) -> ActorResult {
let q = language().unparse(&gatekeeper::ResolvePathStep {
origin: origin.clone(),
path_step: gatekeeper::PathStep {
step_type: "noise".to_string(),
detail: language().unparse(&spec),
}
});
let service = spec.service.clone();
let validated = validate_noise_spec(spec)?;
let observer = Cap::guard(&language().syndicate, t.create(
syndicate::entity(()).on_asserted_facet(
enclose!((ds, q) move |_, t, r: gatekeeper::Resolved| {
match r {
gatekeeper::Resolved::Rejected(b) => {
ds.assert(t, language(), &rpc::answer(
language(), q.clone(), R::Result::Error {
error: b.detail }));
}
gatekeeper::Resolved::Accepted { responder_session } =>
run_initiator_session(
t, ds.clone(), q.clone(), &validated, responder_session)?,
}
Ok(())
}))));
origin.assert(t, language(), &gatekeeper::Resolve {
step: gatekeeper::Step {
step_type: "noise".to_string(),
detail: language().unparse(&service),
},
observer,
});
Ok(())
}
fn run_initiator_session(
t: &mut Activation,
ds: Arc<Cap>,
question: AnyValue,
spec: &ValidatedNoiseSpec,
responder_session: Arc<Cap>,
) -> ActorResult {
let initiator_session_ref = t.create_inert();
let initiator_session = Cap::guard(crate::Language::arc(), initiator_session_ref.clone());
responder_session.assert(t, language(), &noise::Initiator { initiator_session });
let mut hss = HandshakeState {
peer: responder_session.clone(),
hs: make_handshake(spec, true)?,
initial_ref: None,
initial_oid: Some(sturdy::Oid(0.into())),
};
if !hss.hs.completed() {
if hss.send_handshake_packet(t)?.is_some() {
// TODO: this might be a valid pattern, check
panic!("Unexpected complete handshake after no messages");
}
}
initiator_session_ref.become_entity(InitiatorState::Handshake { ds, question, hss });
Ok(())
}
enum InitiatorState {
Handshake {
ds: Arc<Cap>,
question: AnyValue,
hss: HandshakeState,
},
Transport(TransportState),
}
impl Entity<noise::Packet> for InitiatorState {
fn message(&mut self, t: &mut Activation, p: noise::Packet) -> ActorResult {
match self {
InitiatorState::Handshake { hss, ds, question } => {
if let Some((Some(peer_service), ts)) = hss.handle_packet(t, p)? {
let ds = ds.clone();
let question = question.clone();
*self = InitiatorState::Transport(ts);
ds.assert(t, language(), &rpc::answer(language(), question, R::Result::Ok {
value: AnyValue::domain(peer_service) }));
}
}
InitiatorState::Transport(ts) => ts.handle_packet(t, p)?,
}
Ok(())
}
}

View File

@ -1,140 +0,0 @@
use std::sync::Arc;
use preserves_schema::Codec;
use syndicate::actor::*;
use syndicate::rpc;
use syndicate::value::NestedValue;
use syndicate::enclose;
use syndicate_macros::during;
use syndicate_macros::pattern;
use syndicate::schemas::dataspace;
use syndicate::schemas::gatekeeper;
use syndicate::schemas::sturdy;
use syndicate::schemas::rpc as R;
use crate::language;
fn sturdy_step_type() -> String {
language().unparse(&sturdy::SturdyStepType).value().to_symbol().unwrap().clone()
}
pub fn handle_sturdy_binds(t: &mut Activation, ds: &Arc<Cap>) -> ActorResult {
during!(t, ds, language(), <bind <ref $desc> $target $observer>, |t: &mut Activation| {
t.spawn_link(None, move |t| {
target.value().to_embedded()?;
let observer = language().parse::<gatekeeper::BindObserver>(&observer)?;
let desc = language().parse::<sturdy::SturdyDescriptionDetail>(&desc)?;
let sr = sturdy::SturdyRef::mint(desc.oid, &desc.key);
if let gatekeeper::BindObserver::Present(o) = observer {
o.assert(t, language(), &gatekeeper::Bound::Bound {
path_step: Box::new(gatekeeper::PathStep {
step_type: sturdy_step_type(),
detail: language().unparse(&sr.parameters),
}),
});
}
Ok(())
});
Ok(())
});
Ok(())
}
pub fn take_sturdy_step(t: &mut Activation, ds: &mut Arc<Cap>, a: &gatekeeper::Resolve, detail: &mut &'static str) -> Result<bool, ActorError> {
if a.step.step_type == sturdy_step_type() {
*detail = "invalid";
if let Ok(s) = language().parse::<sturdy::SturdyStepDetail>(&a.step.detail) {
t.facet(|t| {
let f = super::handle_direct_resolution(ds, t, a.clone())?;
await_bind_sturdyref(ds, t, sturdy::SturdyRef { parameters: s.0 }, a.observer.clone(), f)
})?;
return Ok(true);
}
}
Ok(false)
}
fn await_bind_sturdyref(
ds: &mut Arc<Cap>,
t: &mut Activation,
sturdyref: sturdy::SturdyRef,
observer: Arc<Cap>,
direct_resolution_facet: FacetId,
) -> ActorResult {
let queried_oid = sturdyref.parameters.oid.clone();
let handler = syndicate::entity(observer)
.on_asserted(move |observer, t, a: AnyValue| {
t.stop_facet(direct_resolution_facet);
let bindings = a.value().to_sequence()?;
let key = bindings[0].value().to_bytestring()?;
let unattenuated_target = bindings[1].value().to_embedded()?;
match sturdyref.validate_and_attenuate(key, unattenuated_target) {
Err(e) => {
tracing::warn!(sturdyref = ?language().unparse(&sturdyref),
"sturdyref failed validation: {}", e);
observer.assert(t, language(), &gatekeeper::Resolved::Rejected(
Box::new(gatekeeper::Rejected {
detail: AnyValue::symbol("sturdyref-failed-validation"),
})));
},
Ok(target) => {
tracing::trace!(sturdyref = ?language().unparse(&sturdyref),
?target,
"sturdyref resolved");
observer.assert(t, language(), &gatekeeper::Resolved::Accepted {
responder_session: target,
});
}
}
Ok(None)
})
.create_cap(t);
ds.assert(t, language(), &dataspace::Observe {
// TODO: codegen plugin to generate pattern constructors
pattern: pattern!{<bind <ref { oid: #(&queried_oid), key: $ }> $ _>},
observer: handler,
});
Ok(())
}
pub fn handle_sturdy_path_steps(t: &mut Activation, ds: Arc<Cap>) -> ActorResult {
during!(t, ds, language(),
<q <resolve-path-step $origin <ref $parameters: sturdy::SturdyPathStepDetail::<AnyValue>>>>,
enclose!((ds) move |t: &mut Activation| {
if let Some(origin) = origin.value().as_embedded().cloned() {
let observer = Cap::guard(&language().syndicate, t.create(
syndicate::entity(()).on_asserted_facet(
enclose!((origin, parameters) move |_, t, r: gatekeeper::Resolved| {
ds.assert(t, language(), &rpc::answer(
language(),
gatekeeper::ResolvePathStep {
origin: origin.clone(),
path_step: gatekeeper::PathStep {
step_type: "ref".to_string(),
detail: language().unparse(&parameters),
},
},
match r {
gatekeeper::Resolved::Accepted { responder_session } =>
R::Result::Ok { value: language().unparse(
&gatekeeper::ResolvedPathStep(responder_session)) },
gatekeeper::Resolved::Rejected(b) =>
R::Result::Error { error: b.detail },
}));
Ok(())
}))));
origin.assert(t, language(), &gatekeeper::Resolve {
step: gatekeeper::Step {
step_type: "ref".to_string(),
detail: language().unparse(&parameters),
},
observer,
});
}
Ok(())
}));
Ok(())
}

View File

@ -1,93 +0,0 @@
use preserves_schema::Codec;
use syndicate::relay;
use syndicate::schemas::trace;
use std::convert::TryFrom;
use std::sync::Arc;
use syndicate::actor::*;
use syndicate::rpc;
use syndicate::supervise::{Supervisor, SupervisorConfiguration};
use syndicate::value::NestedValue;
use syndicate::schemas::transport_address::Tcp;
use syndicate::schemas::rpc as R;
use syndicate::schemas::gatekeeper as G;
use tokio::net::TcpStream;
use crate::language;
use syndicate::enclose;
use syndicate::preserves::rec;
use syndicate_macros::during;
struct TransportControl;
impl Entity<G::TransportControl> for TransportControl {
fn message(&mut self, t: &mut Activation, c: G::TransportControl) -> ActorResult {
let G::TransportControl(G::ForceDisconnect) = c;
t.stop_root();
Ok(())
}
}
pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) {
t.spawn(Some(AnyValue::symbol("transport_connector")), move |t| {
during!(t, ds, language(), <q <connect-transport $addr: Tcp>>, |t| {
Supervisor::start(
t,
Some(rec![AnyValue::symbol("relay"), language().unparse(&addr)]),
SupervisorConfiguration::default(),
|_t, _s| Ok(()),
enclose!((ds) move |t| enclose!((ds, addr) run(t, ds, addr))))
});
Ok(())
});
}
fn run(t: &mut Activation, ds: Arc<Cap>, addr: Tcp) -> ActorResult {
tracing::info!(?addr, "Connecting");
let name = AnyValue::new(vec![AnyValue::symbol("connector"), language().unparse(&addr)]);
let trace_collector = t.trace_collector();
let facet = t.facet_ref();
t.linked_task(Some(name.clone()), async move {
let port = u16::try_from(&addr.port).map_err(|_| "Invalid TCP port number")?;
let account = Account::new(Some(name), trace_collector.clone());
let cause = trace_collector.as_ref().map(|_| trace::TurnCause::external("connect"));
match TcpStream::connect((addr.host.clone(), port)).await {
Ok(stream) => {
let (i, o) = stream.into_split();
let i = relay::Input::Bytes(Box::pin(i));
let o = relay::Output::Bytes(Box::pin(o));
let initial_oid = Some(syndicate::sturdy::Oid(0.into()));
facet.activate(&account, cause, |t| {
let peer = relay::TunnelRelay::run(t, i, o, None, initial_oid, false)
.expect("missing initial cap on connection");
let control = Cap::guard(&language().syndicate, t.create(TransportControl));
ds.assert(t, language(), &rpc::answer(
language(),
G::ConnectTransport { addr: language().unparse(&addr) },
R::Result::Ok { value: language().unparse(&G::ConnectedTransport {
addr: language().unparse(&addr),
control,
responder_session: peer,
}) }));
Ok(())
});
Ok(LinkedTaskTermination::KeepFacet)
}
Err(e) => {
facet.activate(&account, cause, |t| {
ds.assert(t, language(), &rpc::answer(
language(),
G::ConnectTransport { addr: language().unparse(&addr) },
R::Result::Error { error: AnyValue::symbol(&format!("{:?}", e.kind())) }));
Ok(())
});
Ok(LinkedTaskTermination::Normal)
}
}
});
Ok(())
}

View File

@ -9,7 +9,7 @@ use syndicate::actor::*;
 use syndicate::dataspace::Dataspace;
 use syndicate::during;
 use syndicate::enclose;
-use syndicate::pattern::{lift_literal, drop_literal, pattern_seq_from_dictionary};
+use syndicate::pattern::{lift_literal, drop_literal};
 use syndicate::schemas::dataspace;
 use syndicate::schemas::dataspace_patterns as P;
 use syndicate::schemas::sturdy;
@ -173,7 +173,7 @@ fn bad_instruction(message: &str) -> io::Error {
 }
 
 fn discard() -> P::Pattern {
-    P::Pattern::Discard
+    P::Pattern::DDiscard(Box::new(P::DDiscard))
 }
 
 fn dlit(value: AnyValue) -> P::Pattern {
@ -261,6 +261,7 @@ impl<'env> PatternInstantiator<'env> {
     fn instantiate_pattern(&mut self, template: &AnyValue) -> io::Result<P::Pattern> {
         Ok(match template.value() {
             Value::Boolean(_) |
+            Value::Float(_) |
             Value::Double(_) |
             Value::SignedInteger(_) |
             Value::String(_) |
@ -272,7 +273,7 @@ impl<'env> PatternInstantiator<'env> {
             Symbolic::Discard => discard(),
             Symbolic::Binder(s) => {
                 self.binding_names.push(s);
-                P::Pattern::Bind { pattern: Box::new(discard()) }
+                P::Pattern::DBind(Box::new(P::DBind { pattern: discard() }))
             }
             Symbolic::Reference(s) =>
                 dlit(self.env.lookup(&s, "pattern-template variable")?.clone()),
@ -287,47 +288,43 @@ impl<'env> PatternInstantiator<'env> {
                     Some(pat) => pat,
                     None => {
                         let label = self.instantiate_pattern(r.label())?;
-                        let entries = r.fields().iter().enumerate()
-                            .map(|(i, p)| Ok((AnyValue::new(i), self.instantiate_pattern(p)?)))
-                            .collect::<io::Result<Map<AnyValue, P::Pattern>>>()?;
-                        P::Pattern::Group {
-                            type_: Box::new(P::GroupType::Rec {
-                                label: drop_literal(&label)
-                                    .ok_or(bad_instruction("Record pattern must have literal label"))?,
-                            }),
-                            entries,
-                        }
+                        let fields = r.fields().iter().map(|p| self.instantiate_pattern(p))
+                            .collect::<io::Result<Vec<P::Pattern>>>()?;
+                        P::Pattern::DCompound(Box::new(P::DCompound::Rec {
+                            label: drop_literal(&label)
+                                .ok_or(bad_instruction("Record pattern must have literal label"))?,
+                            fields,
+                        }))
                     }
                 }
             },
             Value::Sequence(v) =>
-                P::Pattern::Group {
-                    type_: Box::new(P::GroupType::Arr),
-                    entries: v.iter().enumerate()
-                        .map(|(i, p)| Ok((AnyValue::new(i), self.instantiate_pattern(p)?)))
-                        .collect::<io::Result<Map<AnyValue, P::Pattern>>>()?,
-                },
+                P::Pattern::DCompound(Box::new(P::DCompound::Arr {
+                    items: v.iter()
+                        .map(|p| self.instantiate_pattern(p))
+                        .collect::<io::Result<Vec<P::Pattern>>>()?,
+                })),
             Value::Set(_) =>
                 Err(bad_instruction(&format!("Sets not permitted in patterns: {:?}", template)))?,
             Value::Dictionary(v) =>
-                P::Pattern::Group {
-                    type_: Box::new(P::GroupType::Dict),
+                P::Pattern::DCompound(Box::new(P::DCompound::Dict {
                     entries: v.iter()
                         .map(|(a, b)| Ok((a.clone(), self.instantiate_pattern(b)?)))
                         .collect::<io::Result<Map<AnyValue, P::Pattern>>>()?,
-                },
+                })),
         })
     }
 
     fn maybe_binder_with_pattern(&mut self, r: &Record<AnyValue>) -> io::Result<Option<P::Pattern>> {
         match r.label().value().as_symbol().map(|s| analyze(&s)) {
-            Some(Symbolic::Binder(formal)) if r.fields().len() == 1 => {
+            Some(Symbolic::Binder(formal)) => if r.fields().len() == 1 {
                 let pattern = self.instantiate_pattern(&r.fields()[0])?;
                 self.binding_names.push(formal);
-                Ok(Some(P::Pattern::Bind { pattern: Box::new(pattern) }))
+                return Ok(Some(P::Pattern::DBind(Box::new(P::DBind { pattern }))));
             },
-            _ => Ok(None),
+            _ => (),
         }
+        Ok(None)
     }
 }
@ -375,6 +372,7 @@ impl Env {
     fn instantiate_value(&self, template: &AnyValue) -> io::Result<AnyValue> {
         Ok(match template.value() {
             Value::Boolean(_) |
+            Value::Float(_) |
             Value::Double(_) |
             Value::SignedInteger(_) |
             Value::String(_) |
@ -557,7 +555,7 @@ impl Env {
             RewriteTemplate::Accept { pattern_template } => {
                 let (_binding_names, pattern) = self.instantiate_pattern(pattern_template)?;
                 Ok(sturdy::Rewrite {
-                    pattern: embed_pattern(&P::Pattern::Bind { pattern: Box::new(pattern) }),
+                    pattern: embed_pattern(&P::Pattern::DBind(Box::new(P::DBind { pattern }))),
                     template: sturdy::Template::TRef(Box::new(sturdy::TRef { binding: 0.into() })),
                 })
             }
@ -607,6 +605,7 @@ impl Env {
         Ok(match template.value() {
             Value::Boolean(_) |
+            Value::Float(_) |
             Value::Double(_) |
             Value::SignedInteger(_) |
             Value::String(_) |
@ -678,26 +677,24 @@ impl Env {
 fn embed_pattern(p: &P::Pattern) -> sturdy::Pattern {
     match p {
-        P::Pattern::Discard => sturdy::Pattern::PDiscard(Box::new(sturdy::PDiscard)),
-        P::Pattern::Bind { pattern } => sturdy::Pattern::PBind(Box::new(sturdy::PBind {
-            pattern: embed_pattern(&**pattern),
+        P::Pattern::DDiscard(_) => sturdy::Pattern::PDiscard(Box::new(sturdy::PDiscard)),
+        P::Pattern::DBind(b) => sturdy::Pattern::PBind(Box::new(sturdy::PBind {
+            pattern: embed_pattern(&b.pattern),
         })),
-        P::Pattern::Lit { value } => sturdy::Pattern::Lit(Box::new(sturdy::Lit {
-            value: language().unparse(&**value),
+        P::Pattern::DLit(b) => sturdy::Pattern::Lit(Box::new(sturdy::Lit {
+            value: language().unparse(&b.value),
         })),
-        P::Pattern::Group { type_, entries } => sturdy::Pattern::PCompound(Box::new(match &**type_ {
-            P::GroupType::Rec { label } =>
+        P::Pattern::DCompound(b) => sturdy::Pattern::PCompound(Box::new(match &**b {
+            P::DCompound::Rec { label, fields } =>
                 sturdy::PCompound::Rec {
                     label: label.clone(),
-                    fields: pattern_seq_from_dictionary(entries).expect("correct field entries")
-                        .into_iter().map(embed_pattern).collect(),
+                    fields: fields.iter().map(embed_pattern).collect(),
                 },
-            P::GroupType::Arr =>
+            P::DCompound::Arr { items } =>
                 sturdy::PCompound::Arr {
-                    items: pattern_seq_from_dictionary(entries).expect("correct element entries")
-                        .into_iter().map(embed_pattern).collect(),
+                    items: items.iter().map(embed_pattern).collect(),
                 },
-            P::GroupType::Dict =>
+            P::DCompound::Dict { entries } =>
                 sturdy::PCompound::Dict {
                     entries: entries.iter().map(|(k, v)| (k.clone(), embed_pattern(v))).collect(),
                 },
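
Most of the churn in this file tracks a schema change in `dataspace_patterns`: the older `DCompound::Rec`/`Arr` variants carry their sub-patterns as a `Vec`, while the newer `Pattern::Group` keys every entry by its position (hence the `AnyValue::new(i)` keys above and the `pattern_seq_from_dictionary` helper when a sequence is needed back). A standalone sketch of that round-trip, with plain `usize` keys standing in for `AnyValue` and strings standing in for patterns; the helper's behaviour is assumed here, not quoted:

use std::collections::BTreeMap;

// Newer-style representation: entries keyed by field index.
fn to_entries(fields: Vec<&'static str>) -> BTreeMap<usize, &'static str> {
    fields.into_iter().enumerate().collect()
}

// Roughly the job pattern_seq_from_dictionary is assumed to do: recover the
// sequence, failing if the keys are not exactly 0..n.
fn to_sequence(entries: &BTreeMap<usize, &'static str>) -> Option<Vec<&'static str>> {
    (0..entries.len()).map(|i| entries.get(&i).copied()).collect()
}

fn main() {
    let entries = to_entries(vec!["a", "b", "c"]);
    assert_eq!(entries[&1], "b");
    assert_eq!(to_sequence(&entries), Some(vec!["a", "b", "c"]));

    let mut sparse = entries.clone();
    sparse.remove(&1);
    assert_eq!(to_sequence(&sparse), None); // a gap means it is not a plain sequence
}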

View File

@ -36,7 +36,7 @@ use syndicate_macros::during;
 pub fn on_demand(t: &mut Activation, config_ds: Arc<Cap>) {
     t.spawn(Some(AnyValue::symbol("config_watcher")), move |t| {
-        Ok(during!(t, config_ds, language(), <run-service $spec: internal_services::ConfigWatcher::<AnyValue>>, |t| {
+        Ok(during!(t, config_ds, language(), <run-service $spec: internal_services::ConfigWatcher>, |t| {
             Supervisor::start(
                 t,
                 Some(rec![AnyValue::symbol("config"), AnyValue::new(spec.path.clone())]),
@ -184,7 +184,7 @@ fn run(
     let mut watcher = watcher(tx, Duration::from_millis(100)).map_err(convert_notify_error)?;
     watcher.watch(&env.path, RecursiveMode::Recursive).map_err(convert_notify_error)?;
 
-    let facet = t.facet_ref();
+    let facet = t.facet.clone();
     let trace_collector = t.trace_collector();
     let span = tracing::Span::current();
     thread::spawn(move || {

View File

@ -24,7 +24,7 @@ use syndicate_macros::during;
 pub fn on_demand(t: &mut Activation, config_ds: Arc<Cap>, root_ds: Arc<Cap>) {
     t.spawn(Some(AnyValue::symbol("daemon_listener")), move |t| {
-        Ok(during!(t, config_ds, language(), <run-service $spec: DaemonService::<AnyValue>>,
+        Ok(during!(t, config_ds, language(), <run-service $spec: DaemonService>,
                    enclose!((config_ds, root_ds) move |t: &mut Activation| {
                        supervise_daemon(t, config_ds, root_ds, spec)
                    })))
@ -41,7 +41,7 @@ fn supervise_daemon(
     lifecycle::on_service_restart(t, &config_ds, &spec, enclose!(
         (config_ds, root_ds, spec) move |t| {
             tracing::info!(id = ?spec.id, "Terminating to restart");
-            t.stop_facet_and_continue(t.facet_id(), Some(
+            t.stop_facet_and_continue(t.facet.facet_id, Some(
                 enclose!((config_ds, root_ds, spec) move |t: &mut Activation| {
                     supervise_daemon(t, config_ds, root_ds, spec)
                 })))
@ -176,7 +176,7 @@ impl DaemonInstance {
     fn handle_exit(self, t: &mut Activation, error_message: Option<String>) -> ActorResult {
         let delay =
             std::time::Duration::from_millis(if let None = error_message { 200 } else { 1000 });
-        t.stop_facet_and_continue(t.facet_id(), Some(move |t: &mut Activation| {
+        t.stop_facet_and_continue(t.facet.facet_id, Some(move |t: &mut Activation| {
             #[derive(Debug)]
             enum NextStep {
                 SleepAndRestart,
@ -230,7 +230,7 @@ impl DaemonInstance {
         kind: &str
     ) -> ActorResult {
         t.facet(|t| {
-            let facet = t.facet_ref();
+            let facet = t.facet.clone();
             let log_ds = self.log_ds.clone();
             let service = self.service.clone();
             let kind = AnyValue::symbol(kind);
@ -290,7 +290,7 @@ impl DaemonInstance {
         let pid = child.id();
         tracing::debug!(?pid, cmd = ?self.cmd, "started");
 
-        let facet = t.facet_ref();
+        let facet = t.facet.clone();
 
         if let Some(r) = child.stderr.take() {
             self.log(t, pid, r, "stderr")?;
@ -401,7 +401,7 @@ fn run(
             Ok(config) => {
                 tracing::info!(?config);
                 let config = config.elaborate();
-                let facet = t.facet_ref();
+                let facet = t.facet.clone();
                 t.linked_task(Some(AnyValue::symbol("subprocess")), async move {
                     let mut cmd = config.process.build_command().ok_or("Cannot start daemon process")?;

View File

@ -1,61 +0,0 @@
use preserves_schema::Codec;
use std::sync::Arc;
use syndicate::actor::*;
use syndicate::enclose;
use syndicate::preserves::rec;
use syndicate::preserves::value::NestedValue;
use syndicate::schemas::gatekeeper;
use syndicate_macros::during;
use crate::language::Language;
use crate::language::language;
use crate::lifecycle;
use crate::resolution::sturdy;
use crate::resolution::noise;
use crate::schemas::internal_services::Gatekeeper;
pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) {
t.spawn(Some(AnyValue::symbol("gatekeeper_listener")), move |t| {
Ok(during!(t, ds, language(), <run-service $spec: Gatekeeper::<AnyValue>>, |t: &mut Activation| {
t.spawn_link(Some(rec![AnyValue::symbol("gatekeeper"), language().unparse(&spec)]),
enclose!((ds) |t| run(t, ds, spec)));
Ok(())
}))
});
}
pub fn create_gatekeeper(t: &mut Activation, bindspace: &Arc<Cap>) -> Result<Arc<Cap>, ActorError> {
sturdy::handle_sturdy_binds(t, bindspace)?;
noise::handle_noise_binds(t, bindspace)?;
Ok(Cap::guard(Language::arc(), t.create(
syndicate::entity(Arc::clone(bindspace))
.on_asserted_facet(facet_handle_resolve))))
}
fn run(t: &mut Activation, ds: Arc<Cap>, spec: Gatekeeper<AnyValue>) -> ActorResult {
let gk = create_gatekeeper(t, &spec.bindspace)?;
ds.assert(t, language(), &syndicate::schemas::service::ServiceObject {
service_name: language().unparse(&spec),
object: AnyValue::domain(gk),
});
ds.assert(t, language(), &lifecycle::started(&spec));
ds.assert(t, language(), &lifecycle::ready(&spec));
Ok(())
}
fn facet_handle_resolve(
ds: &mut Arc<Cap>,
t: &mut Activation,
a: gatekeeper::Resolve,
) -> ActorResult {
let mut detail: &'static str = "unsupported";
if sturdy::take_sturdy_step(t, ds, &a, &mut detail)? { return Ok(()); }
if noise::take_noise_step(t, ds, &a, &mut detail)? { return Ok(()); }
a.observer.assert(t, language(), &gatekeeper::Rejected {
detail: AnyValue::symbol(detail),
});
Ok(())
}

View File

@ -1,348 +0,0 @@
use preserves_schema::Codec;
use std::convert::TryFrom;
use std::io::Read;
use std::sync::Arc;
use syndicate::actor::*;
use syndicate::enclose;
use syndicate::error::Error;
use syndicate::preserves::rec;
use syndicate::preserves::value::Map;
use syndicate::preserves::value::NestedValue;
use syndicate::schemas::http;
use syndicate::value::signed_integer::SignedInteger;
use crate::language::language;
use crate::lifecycle;
use crate::schemas::internal_services::HttpRouter;
use crate::schemas::internal_services::HttpStaticFileServer;
use syndicate_macros::during;
lazy_static::lazy_static! {
pub static ref MIME_TABLE: Map<String, String> = load_mime_table("/etc/mime.types").unwrap_or_default();
}
pub fn load_mime_table(path: &str) -> Result<Map<String, String>, std::io::Error> {
let mut table = Map::new();
let file = std::fs::read_to_string(path)?;
for line in file.split('\n') {
if line.starts_with('#') {
continue;
}
let pieces = line.split(&[' ', '\t'][..]).collect::<Vec<&str>>();
for i in 1..pieces.len() {
table.insert(pieces[i].to_string(), pieces[0].to_string());
}
}
Ok(table)
}
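
`load_mime_table` inverts the `/etc/mime.types` layout: each non-comment line is a MIME type followed by whitespace-separated extensions, and the resulting map goes from extension to type. The same parse on an inline sample, with a plain `BTreeMap` and no file I/O:

use std::collections::BTreeMap;

fn parse_mime_table(file: &str) -> BTreeMap<String, String> {
    let mut table = BTreeMap::new();
    for line in file.split('\n') {
        if line.starts_with('#') {
            continue; // comment line
        }
        // First piece is the MIME type; every further piece is an extension.
        let pieces = line.split(&[' ', '\t'][..]).collect::<Vec<&str>>();
        for i in 1..pieces.len() {
            table.insert(pieces[i].to_string(), pieces[0].to_string());
        }
    }
    table
}

fn main() {
    let sample = "# MIME type mappings\n\
                  text/html\thtml htm\n\
                  image/png\tpng\n";
    let table = parse_mime_table(sample);
    assert_eq!(table.get("htm").map(String::as_str), Some("text/html"));
    assert_eq!(table.get("png").map(String::as_str), Some("image/png"));
}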
pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) {
t.spawn(Some(AnyValue::symbol("http_router_listener")), move |t| {
enclose!((ds) during!(t, ds, language(), <run-service $spec: HttpRouter::<AnyValue>>, |t: &mut Activation| {
t.spawn_link(Some(rec![AnyValue::symbol("http_router"), language().unparse(&spec)]),
enclose!((ds) |t| run(t, ds, spec)));
Ok(())
}));
enclose!((ds) during!(t, ds, language(), <run-service $spec: HttpStaticFileServer>, |t: &mut Activation| {
t.spawn_link(Some(rec![AnyValue::symbol("http_static_file_server"), language().unparse(&spec)]),
enclose!((ds) |t| run_static_file_server(t, ds, spec)));
Ok(())
}));
Ok(())
});
}
#[derive(Debug, Clone)]
struct ActiveHandler {
cap: Arc<Cap>,
terminated: Arc<Field<bool>>,
}
type MethodTable = Map<http::MethodPattern, Vec<ActiveHandler>>;
type HostTable = Map<http::HostPattern, Map<http::PathPattern, MethodTable>>;
type RoutingTable = Map<SignedInteger, HostTable>;
fn request_host(value: &http::RequestHost) -> Option<String> {
match value {
http::RequestHost::Present(h) => Some(h.to_owned()),
http::RequestHost::Absent => None,
}
}
fn run(t: &mut Activation, ds: Arc<Cap>, spec: HttpRouter) -> ActorResult {
ds.assert(t, language(), &lifecycle::started(&spec));
ds.assert(t, language(), &lifecycle::ready(&spec));
let httpd = spec.httpd;
let routes: Arc<Field<RoutingTable>> = t.named_field("routes", Map::new());
enclose!((httpd, routes) during!(t, httpd, language(), <http-bind _ $port _ _ _>, |t: &mut Activation| {
let port1 = port.clone();
enclose!((httpd, routes) during!(t, httpd, language(), <http-listener #(&port1)>, enclose!((routes, port) |t: &mut Activation| {
let port2 = port.clone();
during!(t, httpd, language(), <http-bind $host #(&port2) $method $path $handler>, |t: &mut Activation| {
tracing::debug!("+HTTP binding {:?} {:?} {:?} {:?} {:?}", host, port, method, path, handler);
let port = port.value().to_signedinteger()?;
let host = language().parse::<http::HostPattern>(&host)?;
let path = language().parse::<http::PathPattern>(&path)?;
let method = language().parse::<http::MethodPattern>(&method)?;
let handler_cap = handler.value().to_embedded()?.clone();
let handler_terminated = t.named_field("handler-terminated", false);
t.get_mut(&routes)
.entry(port.clone()).or_default()
.entry(host.clone()).or_default()
.entry(path.clone()).or_default()
.entry(method.clone()).or_default()
.push(ActiveHandler {
cap: handler_cap.clone(),
terminated: handler_terminated,
});
t.on_stop(enclose!((routes, method, path, host, port) move |t| {
tracing::debug!("-HTTP binding {:?} {:?} {:?} {:?} {:?}", host, port, method, path, handler);
let port_map = t.get_mut(&routes);
let host_map = port_map.entry(port.clone()).or_default();
let path_map = host_map.entry(host.clone()).or_default();
let method_map = path_map.entry(path.clone()).or_default();
let handler_vec = method_map.entry(method.clone()).or_default();
let handler = {
let i = handler_vec.iter().position(|a| a.cap == handler_cap)
.expect("Expected an index of an active handler to remove");
handler_vec.swap_remove(i)
};
if handler_vec.is_empty() {
method_map.remove(&method);
}
if method_map.is_empty() {
path_map.remove(&path);
}
if path_map.is_empty() {
host_map.remove(&host);
}
if host_map.is_empty() {
port_map.remove(&port);
}
*t.get_mut(&handler.terminated) = true;
Ok(())
}));
Ok(())
});
Ok(())
})));
Ok(())
}));
during!(t, httpd, language(), <request $req $res>, |t: &mut Activation| {
let req = match language().parse::<http::HttpRequest>(&req) { Ok(v) => v, Err(_) => return Ok(()) };
let res = match res.value().to_embedded() { Ok(v) => v, Err(_) => return Ok(()) };
tracing::trace!("Looking up handler for {:#?} in {:#?}", &req, &t.get(&routes));
let host_map = match t.get(&routes).get(&req.port) {
Some(host_map) => host_map,
None => return send_empty(t, res, 404, "Not found"),
};
let methods = match request_host(&req.host).and_then(|h| try_hostname(host_map, http::HostPattern::Host(h), &req.path).transpose()).transpose()? {
Some(methods) => methods,
None => match try_hostname(host_map, http::HostPattern::Any, &req.path)? {
Some(methods) => methods,
None => return send_empty(t, res, 404, "Not found"),
}
};
let handlers = match methods.get(&http::MethodPattern::Specific(req.method.clone())) {
Some(handlers) => handlers,
None => match methods.get(&http::MethodPattern::Any) {
Some(handlers) => handlers,
None => {
let allowed = methods.keys().map(|k| match k {
http::MethodPattern::Specific(m) => m.to_uppercase(),
http::MethodPattern::Any => unreachable!(),
}).collect::<Vec<String>>().join(", ");
res.message(t, language(), &http::HttpResponse::Status {
code: 405.into(), message: "Method Not Allowed".into() });
res.message(t, language(), &http::HttpResponse::Header {
name: "allow".into(), value: allowed });
return send_done(t, res);
}
}
};
if handlers.len() > 1 {
tracing::warn!(?req, "Too many handlers available");
}
let ActiveHandler { cap, terminated } = handlers.first().expect("Nonempty handler set").clone();
tracing::trace!("Handler for {:?} is {:?}", &req, &cap);
t.dataflow(enclose!((terminated, req, res) move |t| {
if *t.get(&terminated) {
tracing::trace!("Handler for {:?} terminated", &req);
send_empty(t, &res, 500, "Internal Server Error")?;
}
Ok(())
}))?;
cap.assert(t, language(), &http::HttpContext { req, res: res.clone() });
Ok(())
});
Ok(())
}
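
When a route matches the path but not the method, the code above answers 405 and derives the `allow` header from the registered method patterns. That header construction in isolation, over a stand-in `MethodPattern` enum and a slice instead of the router's map keys:

enum MethodPattern { Specific(String), Any }

fn allow_header(registered: &[MethodPattern]) -> String {
    registered.iter().map(|k| match k {
        MethodPattern::Specific(m) => m.to_uppercase(),
        MethodPattern::Any => unreachable!(), // a wildcard registration would have matched already
    }).collect::<Vec<String>>().join(", ")
}

fn main() {
    let registered = vec![
        MethodPattern::Specific("get".to_string()),
        MethodPattern::Specific("head".to_string()),
    ];
    assert_eq!(allow_header(&registered), "GET, HEAD");
}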
fn send_done(t: &mut Activation, res: &Arc<Cap>) -> ActorResult {
res.message(t, language(), &http::HttpResponse::Done {
chunk: Box::new(http::Chunk::Bytes(vec![])) });
Ok(())
}
fn send_empty(t: &mut Activation, res: &Arc<Cap>, code: u16, message: &str) -> ActorResult {
res.message(t, language(), &http::HttpResponse::Status {
code: code.into(), message: message.into() });
send_done(t, res)
}
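/// Matches a concrete request `path` against a `PathPattern`: a `Label` element must
/// equal the corresponding path segment, a `Wildcard` matches any single segment, and
/// `Rest` matches whatever remains (including nothing). Without a `Rest` element the
/// path must be consumed exactly.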
fn path_pattern_matches(path_pat: &http::PathPattern, path: &Vec<String>) -> bool {
let mut path_iter = path.iter();
for pat_elem in path_pat.0.iter() {
match pat_elem {
http::PathPatternElement::Label(v) => match path_iter.next() {
Some(path_elem) => {
if v != path_elem {
return false;
}
}
None => return false,
},
http::PathPatternElement::Wildcard => match path_iter.next() {
Some(_) => (),
None => return false,
},
http::PathPatternElement::Rest => return true,
}
}
match path_iter.next() {
Some(_more) => false,
None => true,
}
}
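/// Returns the method table for the first path pattern registered under `host_pat`
/// that matches `path`, or `None` if the host pattern is absent or nothing matches.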
fn try_hostname<'table>(
host_map: &'table HostTable,
host_pat: http::HostPattern,
path: &Vec<String>,
) -> Result<Option<&'table MethodTable>, Error> {
match host_map.get(&host_pat) {
None => Ok(None),
Some(path_table) => {
for (path_pat, method_table) in path_table.iter() {
tracing::trace!("Checking path {:?} against pattern {:?}", &path, &path_pat);
if path_pattern_matches(path_pat, path) {
return Ok(Some(method_table));
}
}
Ok(None)
}
}
}
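/// Renders a minimal HTML directory listing: entry names are HTML-escaped and
/// directories are shown with a trailing '/'.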
fn render_dir(path: std::path::PathBuf) -> Result<(Vec<u8>, Option<&'static str>), Error> {
let mut body = String::new();
for entry in std::fs::read_dir(&path)? {
if let Ok(entry) = entry {
let is_dir = entry.metadata().map(|m| m.is_dir()).unwrap_or(false);
let name = entry.file_name().to_string_lossy()
.replace('&', "&amp;")
.replace('<', "&lt;")
.replace('>', "&gt;")
.replace('\'', "&apos;")
.replace('"', "&quot;") + (if is_dir { "/" } else { "" });
body.push_str(&format!("<a href=\"{}\">{}</a><br>\n", name, name));
}
}
Ok((body.into_bytes(), Some("text/html")))
}
impl HttpStaticFileServer {
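// Serves a file relative to `self.dir`: strips the configured prefix elements from the
// request path, maps a trailing empty segment to "index.html", rejects path elements
// containing '/' or starting with '.', falls back to a directory listing when an index
// file is missing, and answers 301 when a directory is requested without a trailing slash.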
fn respond(&mut self, t: &mut Activation, req: &http::HttpRequest, res: &Arc<Cap>) -> ActorResult {
let path_prefix_elements = usize::try_from(&self.path_prefix_elements)
.map_err(|_| "Bad pathPrefixElements")?;
let mut is_index = false;
let mut path = req.path[path_prefix_elements..].iter().cloned().collect::<Vec<String>>();
if let Some(e) = path.last_mut() {
if e.len() == 0 {
*e = "index.html".into();
is_index = true;
}
}
let mut realpath = std::path::PathBuf::from(&self.dir);
for element in path.into_iter() {
if element.contains('/') || element.starts_with('.') { Err("Invalid path element")?; }
realpath.push(element);
}
let (body, mime_type) = match std::fs::File::open(&realpath) {
Err(_) => {
if is_index {
realpath.pop();
}
if std::fs::metadata(&realpath).is_ok_and(|m| m.is_dir()) {
render_dir(realpath)?
} else {
return send_empty(t, res, 404, "Not found")
}
},
Ok(mut fh) => {
if fh.metadata().is_ok_and(|m| m.is_dir()) {
drop(fh);
res.message(t, language(), &http::HttpResponse::Status {
code: 301.into(), message: "Moved permanently".into() });
res.message(t, language(), &http::HttpResponse::Header {
name: "location".into(), value: format!("/{}/", req.path.join("/")) });
return send_done(t, res);
} else {
let mut buf = Vec::new();
fh.read_to_end(&mut buf)?;
if let Some(extension) = realpath.extension().and_then(|e| e.to_str()) {
(buf, MIME_TABLE.get(extension).map(|m| m.as_str()))
} else {
(buf, None)
}
}
}
};
res.message(t, language(), &http::HttpResponse::Status {
code: 200.into(), message: "OK".into() });
if let Some(mime_type) = mime_type {
res.message(t, language(), &http::HttpResponse::Header {
name: "content-type".into(), value: mime_type.to_owned() });
}
res.message(t, language(), &http::HttpResponse::Done {
chunk: Box::new(http::Chunk::Bytes(body)) });
Ok(())
}
}
impl Entity<http::HttpContext<AnyValue>> for HttpStaticFileServer {
fn assert(&mut self, t: &mut Activation, assertion: http::HttpContext<AnyValue>, _handle: Handle) -> ActorResult {
let http::HttpContext { req, res } = assertion;
if let Err(e) = self.respond(t, &req, &res) {
tracing::error!(?req, error=?e);
send_empty(t, &res, 500, "Internal server error")?;
}
Ok(())
}
}
fn run_static_file_server(t: &mut Activation, ds: Arc<Cap>, spec: HttpStaticFileServer) -> ActorResult {
let object = Cap::guard(&language().syndicate, t.create(spec.clone()));
ds.assert(t, language(), &syndicate::schemas::service::ServiceObject {
service_name: language().unparse(&spec),
object: AnyValue::domain(object),
});
Ok(())
}
View File
@ -1,7 +1,5 @@
pub mod config_watcher; pub mod config_watcher;
pub mod daemon; pub mod daemon;
pub mod debt_reporter; pub mod debt_reporter;
pub mod gatekeeper;
pub mod http_router;
pub mod tcp_relay_listener; pub mod tcp_relay_listener;
pub mod unix_relay_listener; pub mod unix_relay_listener;
View File
@ -15,57 +15,28 @@ use tokio::net::TcpListener;
use crate::language::language; use crate::language::language;
use crate::lifecycle; use crate::lifecycle;
use crate::protocol::detect_protocol; use crate::protocol::detect_protocol;
use crate::schemas::internal_services::TcpWithoutHttp; use crate::schemas::internal_services::TcpRelayListener;
use syndicate_macros::during; use syndicate_macros::during;
pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) { pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) {
t.spawn(Some(AnyValue::symbol("tcp_relay_listener")), move |t| { t.spawn(Some(AnyValue::symbol("tcp_relay_listener")), move |t| {
enclose!((ds) during!(t, ds, language(), <run-service $spec: TcpWithoutHttp::<AnyValue>>, |t| { Ok(during!(t, ds, language(), <run-service $spec: TcpRelayListener>, |t| {
run_supervisor(t, ds.clone(), spec) Supervisor::start(
})); t,
Ok(()) Some(rec![AnyValue::symbol("relay"), language().unparse(&spec)]),
SupervisorConfiguration::default(),
enclose!((ds, spec) lifecycle::updater(ds, spec)),
enclose!((ds) move |t| enclose!((ds, spec) run(t, ds, spec))))
}))
}); });
} }
fn run_supervisor(t: &mut Activation, ds: Arc<Cap>, spec: TcpWithoutHttp) -> ActorResult { fn run(t: &mut Activation, ds: Arc<Cap>, spec: TcpRelayListener) -> ActorResult {
Supervisor::start(
t,
Some(rec![AnyValue::symbol("relay"), language().unparse(&spec)]),
SupervisorConfiguration::default(),
enclose!((ds, spec) lifecycle::updater(ds, spec)),
enclose!((ds) move |t| enclose!((ds, spec) run(t, ds, spec))))
}
fn run(t: &mut Activation, ds: Arc<Cap>, spec: TcpWithoutHttp) -> ActorResult {
lifecycle::terminate_on_service_restart(t, &ds, &spec); lifecycle::terminate_on_service_restart(t, &ds, &spec);
let host = spec.addr.host.clone();
let httpd = t.named_field("httpd", None::<Arc<Cap>>); let port = u16::try_from(&spec.addr.port).map_err(|_| "Invalid TCP port number")?;
let facet = t.facet.clone();
{
let ad = spec.addr.clone();
let ad2 = ad.clone();
let gk = spec.gatekeeper.clone();
enclose!((ds, httpd) during!(t, ds, language(),
<run-service <relay-listener #(&language().unparse(&ad)) #(&AnyValue::domain(gk)) $h>>, |t: &mut Activation| {
if let Some(h) = h.value().as_embedded().cloned() {
h.assert(t, language(), &syndicate::schemas::http::HttpListener { port: ad2.port.clone() });
*t.get_mut(&httpd) = Some(h.clone());
t.on_stop(enclose!((httpd) move |t| {
let f = t.get_mut(&httpd);
if *f == Some(h.clone()) { *f = None; }
Ok(())
}));
}
Ok(())
}));
}
let TcpWithoutHttp { addr, gatekeeper } = spec.clone();
let host = addr.host.clone();
let port = u16::try_from(&addr.port).map_err(|_| "Invalid TCP port number")?;
let facet = t.facet_ref();
let trace_collector = t.trace_collector(); let trace_collector = t.trace_collector();
t.linked_task(Some(AnyValue::symbol("listener")), async move { t.linked_task(Some(AnyValue::symbol("listener")), async move {
let listen_addr = format!("{}:{}", host, port); let listen_addr = format!("{}:{}", host, port);
@ -87,24 +58,17 @@ fn run(t: &mut Activation, ds: Arc<Cap>, spec: TcpWithoutHttp) -> ActorResult {
loop { loop {
let (stream, addr) = listener.accept().await?; let (stream, addr) = listener.accept().await?;
let gatekeeper = gatekeeper.clone(); let gatekeeper = spec.gatekeeper.clone();
let name = Some(rec![AnyValue::symbol("tcp"), AnyValue::new(format!("{}", &addr))]); let name = Some(rec![AnyValue::symbol("tcp"), AnyValue::new(format!("{}", &addr))]);
let cause = trace_collector.as_ref().map(|_| trace::TurnCause::external("connect")); let cause = trace_collector.as_ref().map(|_| trace::TurnCause::external("connect"));
let account = Account::new(name.clone(), trace_collector.clone()); let account = Account::new(name.clone(), trace_collector.clone());
if !facet.activate( if !facet.activate(
&account, cause, enclose!((trace_collector, httpd) move |t| { &account, cause, enclose!((trace_collector) move |t| {
let httpd = t.get(&httpd).clone();
t.spawn(name, move |t| { t.spawn(name, move |t| {
Ok(t.linked_task(None, { Ok(t.linked_task(None, {
let facet = t.facet_ref(); let facet = t.facet.clone();
async move { async move {
detect_protocol(trace_collector, detect_protocol(trace_collector, facet, stream, gatekeeper, addr).await?;
facet,
stream,
gatekeeper,
httpd,
addr,
port).await?;
Ok(LinkedTaskTermination::KeepFacet) Ok(LinkedTaskTermination::KeepFacet)
} }
})) }))
View File
@ -25,7 +25,7 @@ use syndicate_macros::during;
pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) { pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) {
t.spawn(Some(AnyValue::symbol("unix_relay_listener")), move |t| { t.spawn(Some(AnyValue::symbol("unix_relay_listener")), move |t| {
Ok(during!(t, ds, language(), <run-service $spec: UnixRelayListener::<AnyValue>>, |t| { Ok(during!(t, ds, language(), <run-service $spec: UnixRelayListener>, |t| {
Supervisor::start( Supervisor::start(
t, t,
Some(rec![AnyValue::symbol("relay"), language().unparse(&spec)]), Some(rec![AnyValue::symbol("relay"), language().unparse(&spec)]),
@ -39,7 +39,7 @@ pub fn on_demand(t: &mut Activation, ds: Arc<Cap>) {
fn run(t: &mut Activation, ds: Arc<Cap>, spec: UnixRelayListener) -> ActorResult { fn run(t: &mut Activation, ds: Arc<Cap>, spec: UnixRelayListener) -> ActorResult {
lifecycle::terminate_on_service_restart(t, &ds, &spec); lifecycle::terminate_on_service_restart(t, &ds, &spec);
let path_str = spec.addr.path.clone(); let path_str = spec.addr.path.clone();
let facet = t.facet_ref(); let facet = t.facet.clone();
let trace_collector = t.trace_collector(); let trace_collector = t.trace_collector();
t.linked_task(Some(AnyValue::symbol("listener")), async move { t.linked_task(Some(AnyValue::symbol("listener")), async move {
let listener = bind_unix_listener(&PathBuf::from(path_str)).await?; let listener = bind_unix_listener(&PathBuf::from(path_str)).await?;
@ -71,7 +71,7 @@ fn run(t: &mut Activation, ds: Arc<Cap>, spec: UnixRelayListener) -> ActorResult
&account, cause, enclose!((trace_collector) move |t| { &account, cause, enclose!((trace_collector) move |t| {
t.spawn(name, |t| { t.spawn(name, |t| {
Ok(t.linked_task(None, { Ok(t.linked_task(None, {
let facet = t.facet_ref(); let facet = t.facet.clone();
async move { async move {
tracing::info!(protocol = %"unix"); tracing::info!(protocol = %"unix");
let (i, o) = stream.into_split(); let (i, o) = stream.into_split();
View File
@ -1,6 +1,6 @@
[package] [package]
name = "syndicate-tools" name = "syndicate-tools"
version = "0.19.0" version = "0.5.0"
authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"] authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"]
edition = "2018" edition = "2018"
@ -10,14 +10,11 @@ repository = "https://git.syndicate-lang.org/syndicate-lang/syndicate-rs"
license = "Apache-2.0" license = "Apache-2.0"
[dependencies] [dependencies]
preserves = "4.995" preserves = "3.0"
syndicate = { path = "../syndicate", version = "0.41.0"} syndicate = { path = "../syndicate", version = "0.27.0"}
clap = { version = "^4.0", features = ["derive"] } clap = { version = "^4.0", features = ["derive"] }
clap_complete = "^4.0" clap_complete = "^4.0"
noise-protocol = "0.1"
noise-rust-crypto = "0.5"
[package.metadata.workspaces] [package.metadata.workspaces]
independent = true independent = true
View File
@ -7,10 +7,6 @@ use clap::Parser;
use clap::Subcommand; use clap::Subcommand;
use clap::arg; use clap::arg;
use clap_complete::{generate, Shell}; use clap_complete::{generate, Shell};
use noise_protocol::DH;
use noise_protocol::Hash;
use noise_rust_crypto::Blake2s;
use noise_rust_crypto::X25519;
use preserves::hex::HexParser; use preserves::hex::HexParser;
use preserves::value::BytesBinarySource; use preserves::value::BytesBinarySource;
use preserves::value::NestedValue; use preserves::value::NestedValue;
@ -22,9 +18,6 @@ use preserves::value::TextWriter;
use syndicate::language; use syndicate::language;
use syndicate::preserves_schema::Codec; use syndicate::preserves_schema::Codec;
use syndicate::preserves_schema::ParseError; use syndicate::preserves_schema::ParseError;
use syndicate::schemas::noise;
use syndicate::sturdy::Caveat;
use syndicate::sturdy::SturdyRef;
use syndicate::sturdy::_Any; use syndicate::sturdy::_Any;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -46,34 +39,6 @@ enum Action {
#[arg(long, group="key")] #[arg(long, group="key")]
/// Key bytes, encoded as hex /// Key bytes, encoded as hex
hex: Option<String>, hex: Option<String>,
#[arg(long)]
/// Caveats to add
caveat: Vec<Preserves<_Any>>,
},
#[command(group(ArgGroup::new("key").required(true)))]
/// Generate a fresh NoiseServiceSpec from a service selector and a key
Noise {
#[arg(long, value_name="VALUE")]
/// Preserves value to use as the service selector
service: Preserves<_Any>,
#[arg(long, value_name="PROTOCOL")]
/// Noise handshake protocol name
protocol: Option<String>,
#[arg(long, group="key")]
/// Key phrase
phrase: Option<String>,
#[arg(long, group="key")]
/// Key bytes, encoded as hex
hex: Option<String>,
#[arg(long, group="key")]
/// Generate a random key
random: bool,
}, },
/// Emit shell completion code /// Emit shell completion code
@ -108,41 +73,7 @@ fn main() -> io::Result<()> {
generate(shell, &mut cmd, name, &mut io::stdout()); generate(shell, &mut cmd, name, &mut io::stdout());
} }
Action::Noise { service, protocol, phrase, hex, random } => { Action::Mint { oid, phrase, hex } => {
let key =
if random {
X25519::genkey()
} else if let Some(hex) = hex {
let mut hash = Blake2s::default();
hash.input(hex.as_bytes());
hash.result()
} else if let Some(phrase) = phrase {
let mut hash = Blake2s::default();
hash.input(phrase.as_bytes());
hash.result()
} else {
unreachable!()
};
let n = noise::NoiseServiceSpec {
base: noise::NoiseSpec {
key: X25519::pubkey(&key).to_vec(),
service: noise::ServiceSelector(service.0),
pre_shared_keys: noise::NoisePreSharedKeys::Absent,
protocol: if let Some(p) = protocol {
noise::NoiseProtocol::Present { protocol: p }
} else {
noise::NoiseProtocol::Absent
},
},
secret_key: noise::SecretKeyField::Present {
secret_key: key.to_vec(),
},
};
println!("{}", TextWriter::encode(&mut NoEmbeddedDomainCodec,
&language().unparse(&n))?);
}
Action::Mint { oid, phrase, hex, caveat: caveats } => {
let key = let key =
if let Some(hex) = hex { if let Some(hex) = hex {
HexParser::Liberal.decode(&hex).expect("hex encoded sturdyref") HexParser::Liberal.decode(&hex).expect("hex encoded sturdyref")
@ -151,14 +82,7 @@ fn main() -> io::Result<()> {
} else { } else {
unreachable!() unreachable!()
}; };
let attenuation = caveats.into_iter().map(|c| { let m = syndicate::sturdy::SturdyRef::mint(oid.0, &key);
let r = language().parse(&c.0);
if let Ok(Caveat::Unknown(_)) = &r {
eprintln!("Warning: Unknown caveat format: {:?}", &c.0);
}
r
}).collect::<Result<Vec<Caveat>, _>>()?;
let m = SturdyRef::mint(oid.0, &key).attenuate(&attenuation)?;
println!("{}", TextWriter::encode(&mut NoEmbeddedDomainCodec, println!("{}", TextWriter::encode(&mut NoEmbeddedDomainCodec,
&language().unparse(&m))?); &language().unparse(&m))?);
} }
View File
@ -1,6 +1,6 @@
[package] [package]
name = "syndicate" name = "syndicate"
version = "0.41.1" version = "0.27.0"
authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"] authors = ["Tony Garnock-Jones <tonyg@leastfixedpoint.com>"]
edition = "2018" edition = "2018"
@ -13,14 +13,13 @@ license = "Apache-2.0"
vendored-openssl = ["openssl/vendored"] vendored-openssl = ["openssl/vendored"]
[build-dependencies] [build-dependencies]
preserves-schema = "5.995" preserves-schema = "3.2"
syndicate-schema-plugin = { path = "../syndicate-schema-plugin", version = "0.10.0"}
[dependencies] [dependencies]
preserves = "4.995" preserves = "3.0"
preserves-schema = "5.995" preserves-schema = "3.2"
tokio = { version = "1.10", features = ["io-std", "io-util", "macros", "rt", "rt-multi-thread", "time"] } tokio = { version = "1.10", features = ["io-util", "macros", "rt", "rt-multi-thread", "time"] }
tokio-util = "0.6" tokio-util = "0.6"
bytes = "1.0" bytes = "1.0"
View File
@ -11,7 +11,6 @@ use syndicate::during::entity;
use syndicate::dataspace::Dataspace; use syndicate::dataspace::Dataspace;
use syndicate::schemas::dataspace::Observe; use syndicate::schemas::dataspace::Observe;
use syndicate::schemas::dataspace_patterns as p; use syndicate::schemas::dataspace_patterns as p;
use syndicate::value::Map;
use syndicate::value::NestedValue; use syndicate::value::NestedValue;
use syndicate::value::Value; use syndicate::value::Value;
@ -89,11 +88,11 @@ pub fn bench_pub(c: &mut Criterion) {
.create_cap(t); .create_cap(t);
ds.assert(t, language(), &Observe { ds.assert(t, language(), &Observe {
pattern: p::Pattern::Bind { pattern: p::Pattern::DBind(Box::new(p::DBind {
pattern: Box::new(p::Pattern::Lit { pattern: p::Pattern::DLit(Box::new(p::DLit {
value: Box::new(p::AnyAtom::Symbol("consumer".to_owned())), value: p::AnyAtom::Symbol("consumer".to_owned()),
}), })),
}, })),
observer: shutdown, observer: shutdown,
}); });
@ -111,27 +110,24 @@ pub fn bench_pub(c: &mut Criterion) {
ds.assert(t, &(), &AnyValue::symbol("consumer")); ds.assert(t, &(), &AnyValue::symbol("consumer"));
ds.assert(t, language(), &Observe { ds.assert(t, language(), &Observe {
pattern: p::Pattern::Group { pattern: p::Pattern::DCompound(Box::new(p::DCompound::Rec {
type_: Box::new(p::GroupType::Rec { label: AnyValue::symbol("Says"),
label: AnyValue::symbol("Says"), fields: vec![
}), p::Pattern::DLit(Box::new(p::DLit {
entries: Map::from([ value: p::AnyAtom::String("bench_pub".to_owned()),
(p::_Any::new(0), p::Pattern::Lit { })),
value: Box::new(p::AnyAtom::String("bench_pub".to_owned())), p::Pattern::DBind(Box::new(p::DBind {
}), pattern: p::Pattern::DDiscard(Box::new(p::DDiscard)),
(p::_Any::new(1), p::Pattern::Bind { })),
pattern: Box::new(p::Pattern::Discard), ]})),
}),
]),
},
observer: receiver, observer: receiver,
}); });
ds.assert(t, language(), &Observe { ds.assert(t, language(), &Observe {
pattern: p::Pattern::Bind { pattern: p::Pattern::DBind(Box::new(p::DBind {
pattern: Box::new(p::Pattern::Lit { pattern: p::Pattern::DLit(Box::new(p::DLit {
value: Box::new(p::AnyAtom::Bool(true)), value: p::AnyAtom::Bool(true),
}), })),
}, })),
observer: shutdown, observer: shutdown,
}); });
View File
@ -1,18 +1,36 @@
use preserves_schema::compiler::*; use preserves_schema::compiler::*;
mod syndicate_plugins {
use preserves_schema::compiler::*;
use preserves_schema::gen::schema::*;
// use preserves_schema::syntax::block::constructors::*;
#[derive(Debug)]
pub(super) struct PatternPlugin;
impl Plugin for PatternPlugin {
fn generate_definition(
&self,
_m: &mut context::ModuleContext,
_definition_name: &str,
_definition: &Definition,
) {
// TODO: Emit code for building instances of sturdy.Pattern and sturdy.Template
}
}
}
fn main() -> std::io::Result<()> { fn main() -> std::io::Result<()> {
let buildroot = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap()); let buildroot = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
let mut gen_dir = buildroot.clone(); let mut gen_dir = buildroot.clone();
gen_dir.push("src/schemas"); gen_dir.push("src/schemas");
let mut c = CompilerConfig::new("crate::schemas".to_owned()); let mut c = CompilerConfig::new(gen_dir, "crate::schemas".to_owned());
c.plugins.push(Box::new(syndicate_schema_plugin::PatternPlugin { c.plugins.push(Box::new(syndicate_plugins::PatternPlugin));
syndicate_crate: "crate".to_string(),
}));
c.add_external_module(ExternalModule::new(vec!["EntityRef".to_owned()], "crate::actor")); c.add_external_module(ExternalModule::new(vec!["EntityRef".to_owned()], "crate::actor"));
let inputs = expand_inputs(&vec!["protocols/schema-bundle.bin".to_owned()])?; let inputs = expand_inputs(&vec!["protocols/schema-bundle.bin".to_owned()])?;
c.load_schemas_and_bundles(&inputs, &vec![])?; c.load_schemas_and_bundles(&inputs, &vec![])?;
compile(&c, &mut CodeCollector::files(gen_dir)) compile(&c)
} }
View File
@ -1,44 +1,34 @@
[Binary diff of the packed Preserves schema bundle (protocols/schema-bundle.bin); the bundle is not representable as text. The modules it carries include rpc, tcp, http, noise, timer, trace, stdenv, stream, sturdy, worker, service, protocol, dataspace, gatekeeper, transportAddress, and dataspacePatterns.]
View File
@ -1,4 +1,4 @@
version 1 . version 1 .
embeddedType EntityRef.Cap . embeddedType EntityRef.Cap .
Observe = <Observe @pattern dataspacePatterns.Pattern @observer #:any>. Observe = <Observe @pattern dataspacePatterns.Pattern @observer #!any>.
View File
@ -1,30 +1,23 @@
version 1 . version 1 .
embeddedType EntityRef.Cap . embeddedType EntityRef.Cap .
# Dataspace patterns: *almost* a sublanguage of attenuation patterns. ; Dataspace patterns: a sublanguage of attenuation patterns.
# Pattern = DDiscard / DBind / DLit / DCompound .
# One key difference is that Dataspace patterns are extensible, in that
# they ignore fields not mentioned in group patterns.
Pattern = DDiscard = <_>.
/ @discard <_> DBind = <bind @pattern Pattern>.
/ <bind @pattern Pattern> DLit = <lit @value AnyAtom>.
/ <lit @value AnyAtom> DCompound = <rec @label any @fields [Pattern ...]>
/ <group @type GroupType @entries { any: Pattern ...:... }> / <arr @items [Pattern ...]>
. / <dict @entries { any: Pattern ...:... }> .
GroupType =
/ <rec @label any>
/ <arr>
/ <dict>
.
AnyAtom = AnyAtom =
/ @bool bool / @bool bool
/ @float float
/ @double double / @double double
/ @int int / @int int
/ @string string / @string string
/ @bytes bytes / @bytes bytes
/ @symbol symbol / @symbol symbol
/ @embedded #:any / @embedded #!any
. .
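# Illustrative example (not part of the schema): using the newer group-based grammar above,
# the pattern
#   <group <rec Says> { 0: <lit "bench_pub"> 1: <bind <_>> }>
# matches <Says "bench_pub" "hi">, capturing "hi" at the bind; and because group patterns
# ignore fields they do not mention, it also matches <Says "bench_pub" "hi" extra-field>.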
View File
@ -1,96 +1,5 @@
version 1 . version 1 .
embeddedType EntityRef.Cap . embeddedType EntityRef.Cap .
# --------------------------------------------------------------------------- Resolve = <resolve @sturdyref sturdy.SturdyRef @observer #!#!any>.
# Protocol at *gatekeeper* entities Bind = <bind @oid any @key bytes @target #!any>.
# Assertion. Gatekeeper will attempt to resolve `step`, responding with a `Resolved` to
# `observer`.
Resolve = <resolve @step Step @observer #:Resolved> .
Resolved = <accepted @responderSession #:any> / Rejected .
Step = <<rec> @stepType symbol [@detail any]> .
# ---------------------------------------------------------------------------
# Protocol at dataspaces *associated* with gatekeeper entities
# ## Handling `Resolve` requests
#
# When the gatekeeper entity receives a `Resolve` assertion (call it R1), it
#
# 1. asserts a `Resolve` (call it R2) into its associated dataspace that
# is the same as R1 except it has a different `observer`; and
#
# 2. observes a `Bind` with `description` matching the `step` of R1/R2
# according to `stepType` (e.g. treatment of SturdyStepType is not the
# same as treatment of NoiseStepType).
#
# Normally, an appropriate `Bind` is expected to exist. If the gatekeeper
# sees the `Bind` first, it takes the `target` from it and does whatever
# `stepType` mandates before replying to R1's observer.
#
# However, if a `Resolved` is asserted to R2's observer before a `Bind`
# appears, that resolution is relayed on to R1's observer directly, be it
# positive or negative, and the gatekeeper stops waiting for a `Bind`.
#
# This way, entities can keep an eye out for `Resolve` requests that will
# never complete, and answer `Rejected` to them even when no matching
# `Bind` exists. Entities could also use `Resolve` requests to synthesize a
# `Bind` in a "just-in-time" fashion.
#
# ## General treatment of `Bind` assertions
#
# When the gatekeeper sees a `Bind`, independently of any potential
# `Resolve` requests, it computes an appropriate PathStep from
# `description` pointing at `target`, and responds with a `Bound` to
# `observer` (if supplied).
#
Bind = <bind @description Description @target #:any @observer BindObserver> .
Description = <<rec> @stepType symbol [@detail any]> .
BindObserver = @present #:Bound / @absent #f .
Bound = <bound @pathStep PathStep> / Rejected .
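# Illustrative sketch (hypothetical step type and placeholder values): a service could publish
#   <bind <example-step "some detail"> #:<target entity> #f>
# and a client could ask the gatekeeper
#   <resolve <example-step "some detail"> #:<Resolved observer>>
# after which the observer sees either <accepted #:<session entity>> or <rejected "reason">.
# The `#:<...>` positions stand in for embedded entity references and cannot be written
# literally; `example-step` is not a real step type.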
# ---------------------------------------------------------------------------
# Protocol at client-side dataspaces, for resolution utilities
# The client-side operates using `rpc.Question`s and `rpc.Answer`s.
# Assert `rpc.Question` with `ResolvePath` to request resolution of `Route`. The resolution
# utility will continuously try to satisfy the request, following `route.pathSteps` starting
# from one of the `route.transports`, ultimately asserting `rpc.Answer` with an `rpc.Result` in
# response. If the process completes successfully, the `rpc.Result.ok` will carry a
# `ResolvedPath`.
ResolvePath = <resolve-path @route Route> .
ResolvedPath = <resolved-path @addr any @control #:TransportControl @responderSession #:any> .
# Assertions. As `ResolvePath`/`ResolvedPath`, but just for an initial transport link setup.
ConnectTransport = <connect-transport @addr any> .
ConnectedTransport = <connected-transport @addr any @control #:TransportControl @responderSession #:any> .
# Assertions. Like `ResolvePath`/`ResolvedPath`, but for incremental resolution along a route.
ResolvePathStep = <resolve-path-step @origin #:Resolve @pathStep PathStep> .
ResolvedPathStep = #:any .
PathStep = <<rec> @stepType symbol [@detail any]> .
# A `Route` describes a network path that can be followed to reach some target entity.
#
# It starts with a set of zero or more possible non-Syndicate `transports`. These could be
# `transportAddress.Tcp` values or similar. They are just suggestions; it's quite possible the
# endpoint is reachable by some means not listed. The network outside Syndicate is, after all,
# pretty diverse! In particular, *zero* `transports` may be provided, in which case some
# out-of-band means has to be used to make that first connection.
#
# The `transports` give instructions for contacting the first entity in the `Route` path. Often
# this will be a `gatekeeper`, or a `noise` protocol endpoint, or both. Occasionally, it may
# even be the desired target entity. Subsequent `pathSteps` describe how to proceed from the
# initial entity to the target.
#
# (`transports` should by rights be a set, not a sequence, but that opens up a Can Of Worms
# regarding dataspace patterns including literal sets that I can't deal with right now.)
Route = <route @transports [any ...] @pathSteps PathStep ...> .
TransportControl = ForceDisconnect .
ForceDisconnect = <force-disconnect> .
# ---------------------------------------------------------------------------
Rejected = <rejected @detail any> .
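
The client-side resolution described above (`ResolvePath` following `route.pathSteps` starting from one of the `route.transports`) roughly amounts to the loop sketched here. `Transport`, `Session`, `connect` and `take_step` are hypothetical stand-ins, not the resolver that ships with the server.

// Hypothetical stand-ins: a transport suggestion, a connected session, and one
// resolution step. The real resolver works over Preserves values and capabilities.
struct Transport(String);
struct Session(String);
struct PathStep(String);

fn connect(t: &Transport) -> Option<Session> {
    // Try one suggested transport; in reality this is TCP/WebSocket/Noise setup.
    Some(Session(t.0.clone()))
}

fn take_step(s: Session, step: &PathStep) -> Option<Session> {
    // Ask the entity reached so far to resolve the next step (e.g. a sturdy ref).
    Some(Session(format!("{} -> {}", s.0, step.0)))
}

fn resolve_path(transports: &[Transport], steps: &[PathStep]) -> Option<Session> {
    // `transports` are only suggestions and may be empty; try them in order.
    let mut session = transports.iter().find_map(connect)?;
    for step in steps {
        session = take_step(session, step)?;
    }
    Some(session)
}
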

View File

@ -1,15 +1,15 @@
version 1 . version 1 .
# Assertion in driver DS ; Assertion in driver DS
# Causes creation of server and route ; Causes creation of server and route
HttpBinding = <http-bind @host HostPattern @port int @method MethodPattern @path PathPattern @handler #:HttpRequest> . HttpBinding = <http-bind @host HostPattern @port int @method MethodPattern @path PathPattern @handler #!HttpRequest> .
# Assertion in driver DS ; Assertion in driver DS
# Describes active server and route ; Describes active server and route
HttpService = <http-service @host HostPattern @port int @method MethodPattern @path PathPattern> . HttpService = <http-service @host HostPattern @port int @method MethodPattern @path PathPattern> .
# Assertion in driver DS ; Assertion in driver DS
# Describes active listener ; Describes active listener
HttpListener = <http-listener @port int> . HttpListener = <http-listener @port int> .
HostPattern = @host string / @any #f . HostPattern = @host string / @any #f .
@ -18,10 +18,10 @@ PathPatternElement = @label string / @wildcard =_ / @rest =... .
MethodPattern = @any #f / @specific @"Lowercase" symbol . MethodPattern = @any #f / @specific @"Lowercase" symbol .
# Assertion in driver DS ; Assertion in driver DS
HttpRequest = <http-request HttpRequest = <http-request
@sequenceNumber int @sequenceNumber int
@host RequestHost @host string
@port int @port int
@method @"Lowercase" symbol @method @"Lowercase" symbol
@path [string ...] @path [string ...]
@ -31,25 +31,14 @@ HttpRequest = <http-request
Headers = {@"Lowercase" symbol: string ...:...} . Headers = {@"Lowercase" symbol: string ...:...} .
QueryValue = @string string / <file @filename string @headers Headers @body bytes> . QueryValue = @string string / <file @filename string @headers Headers @body bytes> .
RequestBody = @absent #f / @present bytes . RequestBody = @present bytes / @absent #f .
RequestHost = @absent #f / @present string .
# Assertion to handler entity ; Assertion to handler entity
HttpContext = <request @req HttpRequest @res #:HttpResponse> . HttpContext = <request @req HttpRequest @res #!HttpResponse> .
# HttpResponse protocol. Delivered to the `res` ref in `HttpContext`.
#
# (status | header)* . chunk* . done
#
# Done triggers completion of the response and retraction of the frame by the peer. If the
# HttpBinding responsible for the request is withdrawn mid-way through a response (i.e. when
# chunked transfer is used and at least one chunk has been sent) the request is abruptly
# closed; if it is withdrawn at any other moment in the lifetime of the request, a 500 Internal
# Server Error is sent to the client.
#
@<TODO "trailers?"> @<TODO "trailers?">
; Messages
HttpResponse = HttpResponse =
# Messages.
/ <status @code int @message string> / <status @code int @message string>
/ <header @name symbol @value string> / <header @name symbol @value string>
/ <chunk @chunk Chunk> / <chunk @chunk Chunk>
@ -58,5 +47,5 @@ HttpResponse =
Chunk = @string string / @bytes bytes . Chunk = @string string / @bytes bytes .
# e.g. text/plain, text/html, application/json ; e.g. text/plain, text/html, application/json
MimeType = symbol . MimeType = symbol .
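
The response grammar noted above, `(status | header)* . chunk* . done`, can be pictured as a small state machine. The sketch below is illustrative only; the item names mirror the `HttpResponse` variants, but the types are hand-written stand-ins rather than the schema-generated ones.

// Hand-written stand-ins for the HttpResponse variants above.
enum ResponseState { Head, Body, Done }

enum ResponseItem { Status, Header, Chunk, Done }

// Enforce "(status | header)* . chunk* . done": no status/header after the first
// chunk, and nothing after `done`.
fn step(state: ResponseState, item: &ResponseItem)
        -> Result<ResponseState, &'static str> {
    match (state, item) {
        (ResponseState::Head, ResponseItem::Status) => Ok(ResponseState::Head),
        (ResponseState::Head, ResponseItem::Header) => Ok(ResponseState::Head),
        (ResponseState::Head, ResponseItem::Chunk) => Ok(ResponseState::Body),
        (ResponseState::Body, ResponseItem::Chunk) => Ok(ResponseState::Body),
        (ResponseState::Head, ResponseItem::Done) => Ok(ResponseState::Done),
        (ResponseState::Body, ResponseItem::Done) => Ok(ResponseState::Done),
        (ResponseState::Body, _) => Err("status/header after first chunk"),
        (ResponseState::Done, _) => Err("response already completed"),
    }
}
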

View File

@ -1,83 +1,66 @@
version 1 . version 1 .
embeddedType EntityRef.Cap .
# https://noiseprotocol.org/ ; https://noiseprotocol.org/
# --------------------------------------------------------------------------- ; Assertion.
# Binding and connection Connect = <connect @serviceSelector any @initiatorSession #!any> .
NoiseStepType = =noise . ; Assertion (to initiatorSession).
Accept = <accept @responderSession #!any> .
# In a gatekeeper.Step, use ServiceSelector as detail. ; Sessions proceed by sending Packets to the initiatorSession and responderSession according to
NoiseStepDetail = ServiceSelector . ; the Noise protocol definition. Each Packet represents a complete logical unit of
; communication; for example, a complete Turn when layering the Syndicate protocol over Noise.
; Note well the restriction on Noise messages: no individual complete packet or packet fragment
; may exceed 65535 bytes (N.B. not 65536!). When `fragmented`, each portion of a Packet is a
; complete Noise "transport message"; when `complete`, the whole thing is likewise a complete
; "transport message".
Packet = @complete bytes / @fragmented [bytes ...] .
# In a gatekeeper.PathStep, use a NoiseSpec as detail. ; When layering Syndicate protocol over noise,
NoisePathStepDetail = NoiseSpec . ;
; - the canonical encoding of the serviceSelector is the prologue
; - protocol.Packets MUST be encoded using the machine-oriented Preserves syntax
; - zero or more Turns are permitted per noise.Packet
; - each Turn must fit inside a single noise.Packet (fragment if needed)
; - payloads inside a noise.Packet may be padded at the end with byte 0x80 (128), which
; encodes `#f` in the machine-oriented Preserves syntax.
;
; In summary, each noise.Packet, once (reassembled and) decrypted, will be a sequence of zero
; or more machine-encoded protocol.Packets, followed by zero or more 0x80 bytes.
# In a gatekeeper.Description, use a NoiseServiceSpec as detail. ; A `Route` describes a network path that can be followed to reach some target entity.
NoiseDescriptionDetail = NoiseServiceSpec . ;
; It starts with zero or more possible non-Syndicate `transports`, in preference order. These
; could be `transportAddress.Tcp` values or similar. They are just suggestions; it's quite
; possible the endpoint is reachable by some means not listed. The network outside Syndicate
; is, after all, pretty diverse! In particular, *zero* `transports` may be provided, in which
; case some out-of-band means has to be used to make that first connection.
;
; The `transports` give instructions for contacting the first entity in the `Route` path. Often
; this will be a `gatekeeper`, or a `noise` protocol endpoint, or both. Occasionally, it may
; even be the desired target entity. Subsequent `steps` describe how to proceed from the
; initial entity to the target.
Route = <route @transports [any ...] @steps RouteStep ...> .
RouteStep = NoiseStep / GatekeeperStep .
# --------------------------------------------------------------------------- GatekeeperStep = sturdy.SturdyRef .
# Specification of target and bind addresses
ServiceSelector = any .
NoiseStep = <noise @spec NoiseSpec> .
NoiseSpec = { NoiseSpec = {
# The `serviceSelector` to use in a `NoiseStep` for `gatekeeper.Resolve`. ; The `serviceSelector` to use in a `Connect`.
service: ServiceSelector, service: any,
# The responder's static public key. If not required (uncommon!), supply the empty ByteString. ; The responder's static public key. If not required (uncommon!), supply the empty ByteString.
key: bytes, key: bytes,
} }
& @protocol NoiseProtocol & @protocol NoiseProtocol
& @preSharedKeys NoisePreSharedKeys & @preSharedKeys NoisePreSharedKeys
. .
NoiseServiceSpec = @base NoiseSpec & @secretKey SecretKeyField . ; If absent, a default of DefaultProtocol is used. Most services will speak the default.
SecretKeyField = @present { secretKey: bytes } / @invalid { secretKey: any } / @absent {} .
# If absent, a default of DefaultProtocol is used. Most services will speak the default.
NoiseProtocol = @present { protocol: string } / @invalid { protocol: any } / @absent {} . NoiseProtocol = @present { protocol: string } / @invalid { protocol: any } / @absent {} .
DefaultProtocol = "Noise_NK_25519_ChaChaPoly_BLAKE2s" . DefaultProtocol = "Noise_NK_25519_ChaChaPoly_BLAKE2s" .
# If present, Noise pre-shared-keys (PSKs) are drawn from the sequence as required; if the ; If present, Noise pre-shared-keys (PSKs) are drawn from the sequence as required; if the
# sequence is exhausted or not supplied, an all-zeros key is used each time a PSK is needed. ; sequence is exhausted or not supplied, an all-zeros key is used each time a PSK is needed.
NoisePreSharedKeys = @present { preSharedKeys: [bytes ...] } / @invalid { preSharedKeys: any } / @absent {} . NoisePreSharedKeys = @present { preSharedKeys: [bytes ...] } / @invalid { preSharedKeys: any } / @absent {} .
# ---------------------------------------------------------------------------
# Handshaking and running a session
# 1. initiator asserts <resolve <noise ServiceSelector> #:A> at Gatekeeper
# 2. gatekeeper asserts <accepted #:B> at #:A
# 3. initiator asserts <initiator #:C> at #:B and then sends `Packet`s to #:B
# 4. responder sends `Packet`s to #:C
#
# Sessions begin with introduction of initiator (#:C) and responder (#:B) to each other, and
# then proceed by sending `Packet`s (from #:C) to #:B and (from #:B) to #:C according to
# the Noise protocol definition. Each `Packet` represents a complete logical unit of
# communication; for example, a complete Turn when layering the Syndicate protocol over Noise.
# Note well the restriction on Noise messages: no individual complete packet or packet fragment
# may exceed 65535 bytes (N.B. not 65536!). When `fragmented`, each portion of a `Packet` is a
# complete Noise "transport message"; when `complete`, the whole thing is likewise a complete
# "transport message".
#
# Retraction of the `Initiator` ends the session from the initiator side; retraction of the
# `<accepted ...>` assertion ends the session from the responder side.
SessionItem = Initiator / Packet .
# Assertion
Initiator = <initiator @initiatorSession #:Packet> .
# Message
Packet = @complete bytes / @fragmented [bytes ...] .
# When layering Syndicate protocol over noise,
#
# - the canonical encoding of the serviceSelector is the prologue
# - protocol.Packets MUST be encoded using the machine-oriented Preserves syntax
# - zero or more Turns are permitted per noise.Packet
# - each Turn must fit inside a single noise.Packet (fragment if needed)
# - payloads inside a noise.Packet may be padded at the end with byte 0x80 (128), which
# encodes `#f` in the machine-oriented Preserves syntax.
#
# In summary, each noise.Packet, once (reassembled and) decrypted, will be a sequence of zero
# or more machine-encoded protocol.Packets, followed by zero or more 0x80 bytes.
.
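
The two size and padding rules above (no transport message over 65535 bytes; optional trailing 0x80 padding, which decodes as `#f`) lend themselves to a tiny sketch. It is illustrative only and ignores Noise AEAD tag overhead; `fragment` and `strip_padding` are hypothetical helpers, not functions from this crate.

// Noise transport messages may not exceed 65535 bytes; larger payloads must be sent
// as a `fragmented` Packet whose portions are each a complete transport message.
const MAX_TRANSPORT_MESSAGE: usize = 65535;

fn fragment(payload: &[u8]) -> Vec<Vec<u8>> {
    payload.chunks(MAX_TRANSPORT_MESSAGE).map(|c| c.to_vec()).collect()
}

// Once decrypted and reassembled, a noise.Packet is zero or more machine-encoded
// protocol.Packets followed by zero or more 0x80 bytes (the encoding of #f), so a
// receiver can trim the padding before decoding.
fn strip_padding(mut payload: Vec<u8>) -> Vec<u8> {
    while payload.last() == Some(&0x80) {
        payload.pop();
    }
    payload
}
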

View File

@ -1,9 +1,8 @@
version 1 . version 1 .
Packet = Turn / Error / Extension / Nop . Packet = Turn / Error / Extension .
Extension = <<rec> @label any @fields [any ...]> . Extension = <<rec> @label any @fields [any ...]> .
Nop = #f .
Error = <error @message string @detail any>. Error = <error @message string @detail any>.
@ -14,7 +13,7 @@ Oid = int .
Turn = [TurnEvent ...]. Turn = [TurnEvent ...].
TurnEvent = [@oid Oid @event Event]. TurnEvent = [@oid Oid @event Event].
Assert = <A @assertion Assertion @handle Handle>. Assert = <assert @assertion Assertion @handle Handle>.
Retract = <R @handle Handle>. Retract = <retract @handle Handle>.
Message = <M @body Assertion>. Message = <message @body Assertion>.
Sync = <S @peer #:#t>. Sync = <sync @peer #!#t>.

View File

@ -1,11 +0,0 @@
version 1 .
embeddedType EntityRef.Cap .
# Assertion. Establishes a frame for a request.
Question = <q @request any> .
# Assertion or message. Responds to a question.
Answer = <a @request any @response any> .
# Value. Captures a common "ok-or-error" pattern as seen in e.g. Rust.
Result = <ok @value any> / <error @error any> .
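
The question/answer frame above pairs each response with the request it answers by repeating the request value. A rough sketch with plain Rust types, hypothetical stand-ins for the `any`-valued fields:

// Hypothetical stand-ins for rpc.Question / rpc.Answer / rpc.Result.
#[derive(Clone, PartialEq, Debug)]
struct Request(String);

enum RpcResult { Ok(String), Error(String) }

struct Answer { request: Request, response: RpcResult }

// An observer interested in the outcome of a particular question filters answers
// by comparing the embedded request value.
fn answer_for<'a>(q: &Request, answers: &'a [Answer]) -> Option<&'a RpcResult> {
    answers.iter().find(|a| &a.request == q).map(|a| &a.response)
}
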

View File

@ -1,51 +1,51 @@
version 1 . version 1 .
embeddedType EntityRef.Cap . embeddedType EntityRef.Cap .
# Asserts that a service should begin (and stay) running after waiting ; Asserts that a service should begin (and stay) running after waiting
# for its dependencies and considering reverse-dependencies, blocks, ; for its dependencies and considering reverse-dependencies, blocks,
# and so on. ; and so on.
RequireService = <require-service @serviceName any>. RequireService = <require-service @serviceName any>.
# Asserts that a service should begin (and stay) running RIGHT NOW, ; Asserts that a service should begin (and stay) running RIGHT NOW,
# without considering its dependencies. ; without considering its dependencies.
RunService = <run-service @serviceName any>. RunService = <run-service @serviceName any>.
# Asserts one or more current states of service `serviceName`. The ; Asserts one or more current states of service `serviceName`. The
# overall state of the service is the union of asserted `state`s. ; overall state of the service is the union of asserted `state`s.
# ;
# Only a few combinations make sense: ; Only a few combinations make sense:
# - `started` ; - `started`
# - `started` + `ready` ; - `started` + `ready`
# - `failed` ; - `failed`
# - `complete` ; - `complete`
# ;
ServiceState = <service-state @serviceName any @state State>. ServiceState = <service-state @serviceName any @state State>.
# A running service publishes zero or more of these. The details of ; A running service publishes zero or more of these. The details of
# the object vary by service. ; the object vary by service.
# ;
ServiceObject = <service-object @serviceName any @object any>. ServiceObject = <service-object @serviceName any @object any>.
# Possible service states. ; Possible service states.
State = State =
/ # The service has begun its startup routine, and may or may not be / ; The service has begun its startup routine, and may or may not be
# ready to take requests from other parties. ; ready to take requests from other parties.
=started =started
/ # The service is ready to take requests from other parties. / ; The service is ready to take requests from other parties.
# (This state is special in that it is asserted *in addition* to `started`.) ; (This state is special in that it is asserted *in addition* to `started`.)
=ready =ready
/ # The service has failed. / ; The service has failed.
=failed =failed
/ # The service has completed execution. / ; The service has completed execution.
=complete =complete
/ # Extension or user-defined state / ; Extension or user-defined state
@userDefined any @userDefined any
. .
# Asserts that, when `depender` is `require-service`d, it should not be started until ; Asserts that, when `depender` is `require-service`d, it should not be started until
# `dependee` has been asserted, and also that `dependee`'s `serviceName` should be ; `dependee` has been asserted, and also that `dependee`'s `serviceName` should be
# `require-service`d. ; `require-service`d.
ServiceDependency = <depends-on @depender any @dependee ServiceState>. ServiceDependency = <depends-on @depender any @dependee ServiceState>.
# Message. Triggers a service restart. ; Message. Triggers a service restart.
RestartService = <restart-service @serviceName any>. RestartService = <restart-service @serviceName any>.
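
The comment above lists the state combinations that make sense (`started`, `started` + `ready`, `failed`, `complete`). A small sketch of that rule, using a hand-written `State` enum rather than the schema-generated one:

use std::collections::BTreeSet;

// Hand-written stand-in for the State type above; `ready` is only meaningful
// alongside `started`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum State { Started, Ready, Failed, Complete, UserDefined(String) }

// The overall state of a service is the union of its asserted `state`s; this check
// accepts exactly the combinations listed in the schema comment.
fn is_sensible(states: &BTreeSet<State>) -> bool {
    match states.len() {
        1 => states.contains(&State::Started)
            || states.contains(&State::Failed)
            || states.contains(&State::Complete),
        2 => states.contains(&State::Started) && states.contains(&State::Ready),
        _ => false,
    }
}
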

View File

@ -1,31 +0,0 @@
version 1 .
# A "standard" route is
#
# - a collection of websocket urls, for transport.
# - a noise tunnel, for server authentication, confidentiality and integrity.
# - a macaroon, for authorization.
#
# Making these choices allows a compact representation. Encoding a binary-syntax representation
# of a standard route using base64 produces a somewhat-convenient blob of text representing
# access to a network object that users can cut and paste.
#
# A `stdenv.StandardRoute.standard` can be rewritten to a `gatekeeper.Route` like this (with
# `$caveats`, if any, added as appropriate):
#
# <route $transports <noise { service: $service key: $key }> <ref { sig: $sig oid: $oid }>>
#
StandardRoute =
/ @standard [@transports [StandardTransport ...]
@key bytes
@service any
@sig bytes
@oid any
@caveats sturdy.Caveat ...]
/ @general gatekeeper.Route
.
StandardTransport =
/ @wsUrl string
/ @other any
.
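
The rewrite sketched in the comment above (`StandardRoute.standard` to `gatekeeper.Route`) can be illustrated by producing the route in Preserves text syntax. This is a hypothetical helper for illustration only; byte fields are shown as hex ByteStrings and `$caveats` are omitted.

// Hypothetical illustration of the documented rewrite:
//   <route $transports <noise { service: $service key: $key }> <ref { sig: $sig oid: $oid }>>
// Inputs are already-rendered Preserves text for simplicity.
fn hex(bytes: &[u8]) -> String {
    bytes.iter().map(|b| format!("{:02x}", b)).collect()
}

fn standard_to_route(transports: &[String], service: &str, key: &[u8],
                     sig: &[u8], oid: &str) -> String {
    format!(
        "<route [{}] <noise {{ service: {} key: #x\"{}\" }}> <ref {{ sig: #x\"{}\" oid: {} }}>>",
        transports.join(" "), service, hex(key), hex(sig), oid)
}
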

View File

@ -1,38 +1,38 @@
version 1 . version 1 .
embeddedType EntityRef.Cap . embeddedType EntityRef.Cap .
# Assertion: ; Assertion:
StreamConnection = <stream-connection @source #:Source @sink #:Sink @spec any>. StreamConnection = <stream-connection @source #!Source @sink #!Sink @spec any>.
# Assertions: ; Assertions:
StreamListenerReady = <stream-listener-ready @spec any>. StreamListenerReady = <stream-listener-ready @spec any>.
StreamListenerError = <stream-listener-error @spec any @message string>. StreamListenerError = <stream-listener-error @spec any @message string>.
# Assertion: ; Assertion:
StreamError = <error @message string>. StreamError = <error @message string>.
Source = Source =
# Assertions: ; Assertions:
/ <sink @controller #:Sink> / <sink @controller #!Sink>
/ StreamError / StreamError
# Messages: ; Messages:
/ <credit @amount CreditAmount @mode Mode> / <credit @amount CreditAmount @mode Mode>
. .
Sink = Sink =
# Assertions: ; Assertions:
/ <source @controller #:Source> / <source @controller #!Source>
/ StreamError / StreamError
# Messages: ; Messages:
/ <data @payload any @mode Mode> / <data @payload any @mode Mode>
/ <eof> / <eof>
. .
# Value: ; Value:
CreditAmount = @count int / @unbounded =unbounded . CreditAmount = @count int / @unbounded =unbounded .
# Value: ; Value:
Mode = =bytes / @lines LineMode / <packet @size int> / <object @description any>. Mode = =bytes / @lines LineMode / <packet @size int> / <object @description any>.
LineMode = =lf / =crlf . LineMode = =lf / =crlf .

View File

@ -1,43 +1,19 @@
version 1 . version 1 .
embeddedType EntityRef.Cap . embeddedType EntityRef.Cap .
# --------------------------------------------------------------------------- ; The sequence of Caveats is run RIGHT-TO-LEFT.
# Binding and connection ; That is, the newest Caveats are at the right.
;
; Let f = HMAC-BLAKE2s, e = canonical machine-oriented serialization of some preserves value,
; and k = the original secret key for the ref.
;
; The `sig` is then f(f(f(f(k, e(oid)), ...), Caveat), ...).
;
SturdyRef = <ref @oid any @caveatChain [Caveat ...] @sig bytes>.
SturdyStepType = =ref . ; embodies 1st-party caveats over assertion structure, but nothing else
; can add 3rd-party caveats and richer predicates later
# In a gatekeeper.Step or gatekeeper.PathStep, use Parameters as detail. Caveat = Rewrite / Alts / Reject /@unknown any .
SturdyStepDetail = Parameters .
SturdyPathStepDetail = Parameters .
# In a gatekeeper.Description, use the following detail.
SturdyDescriptionDetail = {
oid: any,
key: bytes,
} .
# ---------------------------------------------------------------------------
# Macaroons
# The sequence of Caveats is run RIGHT-TO-LEFT.
# That is, the newest Caveats are at the right.
#
# Let f(k,d) = HMAC-BLAKE2s-256(k,d)[0..16),
# e = canonical machine-oriented serialization of some preserves value, and
# k = the original secret key for the ref.
#
# The `sig` is then f(f(f(f(k, e(oid)), ...), e(Caveat)), ...).
#
SturdyRef = <ref @parameters Parameters> .
Parameters = {
oid: any,
sig: bytes,
} & @caveats CaveatsField .
CaveatsField = @present { caveats: [Caveat ...] } / @invalid { caveats: any } / @absent {} .
# embodies 1st-party caveats over assertion structure, but nothing else
# can add 3rd-party caveats and richer predicates later
Caveat = Rewrite / Alts / Reject / @unknown any .
Rewrite = <rewrite @pattern Pattern @template Template> . Rewrite = <rewrite @pattern Pattern @template Template> .
Reject = <reject @pattern Pattern> . Reject = <reject @pattern Pattern> .
Alts = <or @alternatives [Rewrite ...]>. Alts = <or @alternatives [Rewrite ...]>.
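
The signature chain defined above, sig = f(f(f(f(k, e(oid)), ...), e(Caveat)), ...) with f(k,d) = HMAC-BLAKE2s-256(k,d)[0..16), is straightforward to sketch with the `hmac` and `blake2` crates that this crate's sturdy-ref code already imports. The helper names and the pre-encoded inputs are illustrative assumptions, not the crate's API.

use blake2::Blake2s256;
use hmac::{Mac, SimpleHmac};

// f(k, d) = HMAC-BLAKE2s-256(k, d) truncated to its first 16 bytes.
fn f(k: &[u8], d: &[u8]) -> Vec<u8> {
    let mut mac = SimpleHmac::<Blake2s256>::new_from_slice(k)
        .expect("HMAC accepts keys of any length");
    mac.update(d);
    mac.finalize().into_bytes()[..16].to_vec()
}

// The sig is a fold of f over the caveat encodings, starting from f(k, e(oid));
// attenuating a ref (appending a caveat at the right) is one more application of f.
// Callers pass canonical machine-oriented encodings of the oid and of each Caveat.
fn sig(secret_key: &[u8], encoded_oid: &[u8], encoded_caveats: &[Vec<u8>]) -> Vec<u8> {
    encoded_caveats.iter().fold(f(secret_key, encoded_oid), |acc, c| f(&acc, c))
}
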
@ -45,13 +21,13 @@ Alts = <or @alternatives [Rewrite ...]>.
Oid = int . Oid = int .
WireRef = @mine [0 @oid Oid] / @yours [1 @oid Oid @attenuation Caveat ...]. WireRef = @mine [0 @oid Oid] / @yours [1 @oid Oid @attenuation Caveat ...].
# --------------------------------------------------------------------------- ;---------------------------------------------------------------------------
Lit = <lit @value any>. Lit = <lit @value any>.
Pattern = PDiscard / PAtom / PEmbedded / PBind / PAnd / PNot / Lit / PCompound . Pattern = PDiscard / PAtom / PEmbedded / PBind / PAnd / PNot / Lit / PCompound .
PDiscard = <_>. PDiscard = <_>.
PAtom = =Boolean / =Double / =SignedInteger / =String / =ByteString / =Symbol . PAtom = =Boolean / =Float / =Double / =SignedInteger / =String / =ByteString / =Symbol .
PEmbedded = =Embedded . PEmbedded = =Embedded .
PBind = <bind @pattern Pattern>. PBind = <bind @pattern Pattern>.
PAnd = <and @patterns [Pattern ...]>. PAnd = <and @patterns [Pattern ...]>.

View File

@ -4,4 +4,4 @@ embeddedType EntityRef.Cap .
TcpRemote = <tcp-remote @host string @port int>. TcpRemote = <tcp-remote @host string @port int>.
TcpLocal = <tcp-local @host string @port int>. TcpLocal = <tcp-local @host string @port int>.
TcpPeerInfo = <tcp-peer @handle #:any @local TcpLocal @remote TcpRemote>. TcpPeerInfo = <tcp-peer @handle #!any @local TcpLocal @remote TcpRemote>.

View File

@ -25,10 +25,10 @@ TurnId = any .
ExitStatus = =ok / protocol.Error . ExitStatus = =ok / protocol.Error .
# Trace information associated with a turn. ; Trace information associated with a turn.
TurnDescription = <turn @id TurnId @cause TurnCause @actions [ActionDescription ...]> . TurnDescription = <turn @id TurnId @cause TurnCause @actions [ActionDescription ...]> .
# The cause of a turn. ; The cause of a turn.
TurnCause = TurnCause =
/ @turn <caused-by @id TurnId> / @turn <caused-by @id TurnId>
/ <cleanup> / <cleanup>
@ -40,27 +40,27 @@ TurnCause =
LinkedTaskReleaseReason = =cancelled / =normal . LinkedTaskReleaseReason = =cancelled / =normal .
# An actual event carried within a turn. ; An actual event carried within a turn.
TurnEvent = TurnEvent =
/ <assert @assertion AssertionDescription @handle protocol.Handle> / <assert @assertion AssertionDescription @handle protocol.Handle>
/ <retract @handle protocol.Handle> / <retract @handle protocol.Handle>
/ <message @body AssertionDescription> / <message @body AssertionDescription>
/ <sync @peer Target> / <sync @peer Target>
/ # A souped-up, disguised, special-purpose `retract` event. / ; A souped-up, disguised, special-purpose `retract` event.
@breakLink <break-link @source ActorId @handle protocol.Handle> @breakLink <break-link @source ActorId @handle protocol.Handle>
. .
TargetedTurnEvent = <event @target Target @detail TurnEvent> . TargetedTurnEvent = <event @target Target @detail TurnEvent> .
# An action taken during a turn. ; An action taken during a turn.
ActionDescription = ActionDescription =
/ # The active party is processing a new `event` for `target` from the received Turn. / ; The active party is processing a new `event` for `target` from the received Turn.
<dequeue @event TargetedTurnEvent> <dequeue @event TargetedTurnEvent>
/ # The active party has queued a new `event` to be processed later by `target`. / ; The active party has queued a new `event` to be processed later by `target`.
<enqueue @event TargetedTurnEvent> <enqueue @event TargetedTurnEvent>
/ # The active party is processing an internally-queued event for one of its own entities. / ; The active party is processing an internally-queued event for one of its own entities.
@dequeueInternal <dequeue-internal @event TargetedTurnEvent> @dequeueInternal <dequeue-internal @event TargetedTurnEvent>
/ # The active party has scheduled an internally-queued event for one of its own entities. / ; The active party has scheduled an internally-queued event for one of its own entities.
@enqueueInternal <enqueue-internal @event TargetedTurnEvent> @enqueueInternal <enqueue-internal @event TargetedTurnEvent>
/ <spawn @link bool @id ActorId> / <spawn @link bool @id ActorId>
/ <link / <link
@ -73,9 +73,9 @@ ActionDescription =
/ @linkedTaskStart <linked-task-start @taskName Name @id TaskId> / @linkedTaskStart <linked-task-start @taskName Name @id TaskId>
. .
# An assertion or the body of a message: either a Preserves value, or ; An assertion or the body of a message: either a Preserves value, or
# some opaque system-internal value, represented according to the ; some opaque system-internal value, represented according to the
# system concerned. ; system concerned.
AssertionDescription = AssertionDescription =
/ <value @value any> / <value @value any>
/ <opaque @description any> / <opaque @description any>
@ -90,7 +90,7 @@ FacetStopReason =
Target = <entity @actor ActorId @facet FacetId @oid Oid> . Target = <entity @actor ActorId @facet FacetId @oid Oid> .
# For the future: consider including information about `protocol`-level `Turn`s etc sent to ; For the future: consider including information about `protocol`-level `Turn`s etc sent to
# peers over e.g. Websockets or TCP/IP, allowing cross-correlation of traces from different ; peers over e.g. Websockets or TCP/IP, allowing cross-correlation of traces from different
# processes and implementations with each other to form a large overall picture. ; processes and implementations with each other to form a large overall picture.
. .

File diff suppressed because it is too large

View File

@ -4,7 +4,7 @@
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::btree_map::{Iter, Keys, Entry}; use std::collections::btree_map::{Iter, Keys, Entry};
use std::iter::FromIterator; use std::iter::{FromIterator, IntoIterator};
/// Element counts in [`BTreeBag`]s are 32-bit signed integers. /// Element counts in [`BTreeBag`]s are 32-bit signed integers.
pub type Count = i32; pub type Count = i32;

View File

@ -12,6 +12,7 @@ use super::language;
use super::skeleton; use super::skeleton;
use super::actor::*; use super::actor::*;
use super::schemas::dataspace::*; use super::schemas::dataspace::*;
use super::schemas::dataspace::_Any;
use preserves::value::Map; use preserves::value::Map;
use preserves_schema::Codec; use preserves_schema::Codec;

View File

@ -175,7 +175,7 @@ where
t.on_stop_notify(&r); t.on_stop_notify(&r);
} }
if should_register_exit_hook { if should_register_exit_hook {
t.add_exit_hook(&r); t.state.add_exit_hook(&r);
} }
r r
} }

View File

@ -20,7 +20,6 @@ pub mod error;
pub mod pattern; pub mod pattern;
pub mod relay; pub mod relay;
pub mod rewrite; pub mod rewrite;
pub mod rpc;
pub mod supervise; pub mod supervise;
pub mod schemas { pub mod schemas {
@ -75,7 +74,7 @@ mod protocol_test {
use preserves_schema::Deserialize; use preserves_schema::Deserialize;
#[test] fn decode_sync() { #[test] fn decode_sync() {
let input_str = "[[2 <S #:[0 11]>]]"; let input_str = "[[2 <sync #![0 11]>]]";
let mut src = BytesBinarySource::new(input_str.as_bytes()); let mut src = BytesBinarySource::new(input_str.as_bytes());
let mut r = src.text::<IOValue, _>(ViaCodec::new(IOValueDomainCodec)); let mut r = src.text::<IOValue, _>(ViaCodec::new(IOValueDomainCodec));
let packet: schemas::protocol::Packet<IOValue> = schemas::protocol::Packet::deserialize(&mut r).unwrap(); let packet: schemas::protocol::Packet<IOValue> = schemas::protocol::Packet::deserialize(&mut r).unwrap();

View File

@ -1,5 +1,3 @@
use std::sync::Arc;
use crate::schemas::dataspace_patterns::*; use crate::schemas::dataspace_patterns::*;
use super::language; use super::language;
@ -10,25 +8,23 @@ use preserves::value::Record;
use preserves::value::Value; use preserves::value::Value;
use preserves_schema::Codec; use preserves_schema::Codec;
pub type PathStep = _Any; #[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)]
pub enum PathStep {
Index(usize),
Key(_Any),
}
pub type Path = Vec<PathStep>; pub type Path = Vec<PathStep>;
pub type Paths = Vec<Path>; pub type Paths = Vec<Path>;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct ConstantPositions {
pub with_values: Paths,
pub required_to_exist: Paths,
}
struct Analyzer { struct Analyzer {
pub const_paths: Paths, pub const_paths: Paths,
pub const_values: Vec<_Any>, pub const_values: Vec<_Any>,
pub checked_paths: Paths,
pub capture_paths: Paths, pub capture_paths: Paths,
} }
pub struct PatternAnalysis { pub struct PatternAnalysis {
pub const_positions: Arc<ConstantPositions>, pub const_paths: Paths,
pub const_values: _Any, pub const_values: _Any,
pub capture_paths: Paths, pub capture_paths: Paths,
} }
@ -42,15 +38,11 @@ impl PatternAnalysis {
let mut analyzer = Analyzer { let mut analyzer = Analyzer {
const_paths: Vec::new(), const_paths: Vec::new(),
const_values: Vec::new(), const_values: Vec::new(),
checked_paths: Vec::new(),
capture_paths: Vec::new(), capture_paths: Vec::new(),
}; };
analyzer.walk(&mut Vec::new(), p); analyzer.walk(&mut Vec::new(), p);
PatternAnalysis { PatternAnalysis {
const_positions: Arc::new(ConstantPositions { const_paths: analyzer.const_paths,
with_values: analyzer.const_paths,
required_to_exist: analyzer.checked_paths,
}),
const_values: _Any::new(analyzer.const_values), const_values: _Any::new(analyzer.const_values),
capture_paths: analyzer.capture_paths, capture_paths: analyzer.capture_paths,
} }
@ -66,21 +58,34 @@ impl Analyzer {
fn walk(&mut self, path: &mut Path, p: &Pattern) { fn walk(&mut self, path: &mut Path, p: &Pattern) {
match p { match p {
Pattern::Group { entries, .. } => { Pattern::DCompound(b) => match &**b {
for (k, p) in entries { DCompound::Rec { fields, .. } => {
self.walk_step(path, k.clone(), p) for (i, p) in fields.iter().enumerate() {
self.walk_step(path, PathStep::Index(i), p);
}
}
DCompound::Arr { items, .. } => {
for (i, p) in items.iter().enumerate() {
self.walk_step(path, PathStep::Index(i), p);
}
}
DCompound::Dict { entries, .. } => {
for (k, p) in entries {
self.walk_step(path, PathStep::Key(k.clone()), p);
}
} }
} }
Pattern::Bind { pattern } => { Pattern::DBind(b) => {
let DBind { pattern, .. } = &**b;
self.capture_paths.push(path.clone()); self.capture_paths.push(path.clone());
self.walk(path, &**pattern); self.walk(path, pattern)
} }
Pattern::Discard => { Pattern::DDiscard(_) =>
self.checked_paths.push(path.clone()); (),
} Pattern::DLit(b) => {
Pattern::Lit { value } => { let DLit { value } = &**b;
self.const_paths.push(path.clone()); self.const_paths.push(path.clone());
self.const_values.push(language().unparse(&**value)); self.const_values.push(language().unparse(value));
} }
} }
} }
@ -104,47 +109,52 @@ impl PatternMatcher {
} }
} }
fn run_seq<'a, F: 'a + Fn(usize) -> &'a _Any>(&mut self, entries: &Map<_Any, Pattern<_Any>>, values: F) -> bool {
for (k, p) in entries {
match k.value().as_usize() {
None => return false,
Some(i) => if !self.run(p, values(i)) {
return false;
}
}
}
true
}
fn run(&mut self, pattern: &Pattern<_Any>, value: &_Any) -> bool { fn run(&mut self, pattern: &Pattern<_Any>, value: &_Any) -> bool {
match pattern { match pattern {
Pattern::Discard => true, Pattern::DDiscard(_) => true,
Pattern::Bind { pattern } => { Pattern::DBind(b) => {
self.captures.push(value.clone()); self.captures.push(value.clone());
self.run(&**pattern, value) self.run(&b.pattern, value)
} }
Pattern::Lit { value: expected } => value == &language().unparse(&**expected), Pattern::DLit(b) => value == &language().unparse(&b.value),
Pattern::Group { type_, entries } => match &**type_ { Pattern::DCompound(b) => match &**b {
GroupType::Rec { label } => { DCompound::Rec { label, fields } => {
match value.value().as_record(None) { match value.value().as_record(Some(fields.len())) {
None => false, None => false,
Some(r) => Some(r) => {
r.label() == label && if r.label() != label {
self.run_seq(entries, |i| &r.fields()[i]) return false;
}
for (i, p) in fields.iter().enumerate() {
if !self.run(p, &r.fields()[i]) {
return false;
}
}
true
}
} }
} }
GroupType::Arr => { DCompound::Arr { items } => {
match value.value().as_sequence() { match value.value().as_sequence() {
None => false, None => false,
Some(vs) => Some(vs) => {
self.run_seq(entries, |i| &vs[i]) if vs.len() != items.len() {
return false;
}
for (i, p) in items.iter().enumerate() {
if !self.run(p, &vs[i]) {
return false;
}
}
true
}
} }
} }
GroupType::Dict => { DCompound::Dict { entries: expected_entries } => {
match value.value().as_dictionary() { match value.value().as_dictionary() {
None => false, None => false,
Some(actual_entries) => { Some(actual_entries) => {
for (k, p) in entries { for (k, p) in expected_entries.iter() {
if !actual_entries.get(k).map(|v| self.run(p, v)).unwrap_or(false) { if !actual_entries.get(k).map(|v| self.run(p, v)).unwrap_or(false) {
return false; return false;
} }
@ -160,68 +170,42 @@ impl PatternMatcher {
pub fn lift_literal(v: &_Any) -> Pattern { pub fn lift_literal(v: &_Any) -> Pattern {
match v.value() { match v.value() {
Value::Record(r) => Pattern::Group { Value::Record(r) => Pattern::DCompound(Box::new(DCompound::Rec {
type_: Box::new(GroupType::Rec { label: r.label().clone() }), label: r.label().clone(),
entries: r.fields().iter().enumerate() fields: r.fields().iter().map(lift_literal).collect(),
.map(|(i, v)| (_Any::new(i), lift_literal(v))) })),
.collect(), Value::Sequence(items) => Pattern::DCompound(Box::new(DCompound::Arr {
}, items: items.iter().map(lift_literal).collect(),
Value::Sequence(items) => Pattern::Group { })),
type_: Box::new(GroupType::Arr),
entries: items.iter().enumerate()
.map(|(i, v)| (_Any::new(i), lift_literal(v)))
.collect(),
},
Value::Set(_members) => panic!("Cannot express literal set in pattern"), Value::Set(_members) => panic!("Cannot express literal set in pattern"),
Value::Dictionary(entries) => Pattern::Group { Value::Dictionary(entries) => Pattern::DCompound(Box::new(DCompound::Dict {
type_: Box::new(GroupType::Dict), entries: entries.iter().map(|(k, v)| (k.clone(), lift_literal(v))).collect(),
entries: entries.iter() })),
.map(|(k, v)| (k.clone(), lift_literal(v))) _other => Pattern::DLit(Box::new(DLit {
.collect(), value: language().parse(v).expect("Non-compound datum can be converted to AnyAtom"),
}, })),
_other => Pattern::Lit {
value: Box::new(language().parse(v).expect("Non-compound datum can be converted to AnyAtom")),
},
} }
} }
const DISCARD: Pattern = Pattern::Discard;
pub fn pattern_seq_from_dictionary(entries: &Map<_Any, Pattern>) -> Option<Vec<&Pattern>> {
let mut max_k: Option<usize> = None;
for k in entries.keys() {
max_k = max_k.max(Some(k.value().as_usize()?));
}
let mut seq = vec![];
if let Some(max_k) = max_k {
seq.reserve(max_k + 1);
for i in 0..=max_k {
seq.push(entries.get(&_Any::new(i)).unwrap_or(&DISCARD));
}
}
return Some(seq);
}
fn drop_literal_entries_seq(mut seq: Vec<_Any>, entries: &Map<_Any, Pattern>) -> Option<Vec<_Any>> {
for p in pattern_seq_from_dictionary(entries)?.into_iter() {
seq.push(drop_literal(p)?);
}
Some(seq)
}
pub fn drop_literal(p: &Pattern) -> Option<_Any> { pub fn drop_literal(p: &Pattern) -> Option<_Any> {
match p { match p {
Pattern::Group { type_, entries } => match &**type_ { Pattern::DCompound(b) => match &**b {
GroupType::Rec { label } => DCompound::Rec { label, fields } => {
Some(Value::Record(Record(drop_literal_entries_seq(vec![label.clone()], entries)?)).wrap()), let mut r = vec![label.clone()];
GroupType::Arr => for f in fields.iter() {
Some(Value::Sequence(drop_literal_entries_seq(vec![], entries)?).wrap()), r.push(drop_literal(f)?);
GroupType::Dict => }
Some(Value::Dictionary(entries.iter() Some(Value::Record(Record(r)).wrap())
.map(|(k, p)| Some((k.clone(), drop_literal(p)?))) }
.collect::<Option<Map<_Any, _Any>>>()?).wrap()), DCompound::Arr { items } =>
Some(Value::Sequence(items.iter().map(drop_literal)
.collect::<Option<Vec<_Any>>>()?).wrap()),
DCompound::Dict { entries } =>
Some(Value::Dictionary(entries.iter()
.map(|(k, p)| Some((k.clone(), drop_literal(p)?)))
.collect::<Option<Map<_Any, _Any>>>()?).wrap()),
}, },
Pattern::Lit { value } => Some(language().unparse(&**value)), Pattern::DLit(b) => Some(language().unparse(&b.value)),
_ => None, _ => None,
} }
} }

View File

@ -1,7 +1,6 @@
use bytes::Buf; use bytes::Buf;
use bytes::BytesMut; use bytes::BytesMut;
use crate::Language;
use crate::language; use crate::language;
use crate::actor::*; use crate::actor::*;
use crate::during; use crate::during;
@ -29,7 +28,6 @@ use preserves::value::Map;
use preserves::value::NestedValue; use preserves::value::NestedValue;
use preserves::value::NoEmbeddedDomainCodec; use preserves::value::NoEmbeddedDomainCodec;
use preserves::value::PackedWriter; use preserves::value::PackedWriter;
use preserves::value::Set;
use preserves::value::TextWriter; use preserves::value::TextWriter;
use preserves::value::ViaCodec; use preserves::value::ViaCodec;
use preserves::value::Writer; use preserves::value::Writer;
@ -38,7 +36,6 @@ use preserves::value::signed_integer::SignedInteger;
use preserves_schema::Codec; use preserves_schema::Codec;
use preserves_schema::Deserialize; use preserves_schema::Deserialize;
use preserves_schema::ParseError; use preserves_schema::ParseError;
use preserves_schema::support::Unparse;
use std::io; use std::io;
use std::pin::Pin; use std::pin::Pin;
@ -77,7 +74,6 @@ struct Membranes {
exported: Membrane, exported: Membrane,
imported: Membrane, imported: Membrane,
next_export_oid: usize, next_export_oid: usize,
reimported_attenuations: Map<sturdy::Oid, Set<Arc<Cap>>>,
} }
pub enum Input { pub enum Input {
@ -114,8 +110,8 @@ struct TunnelRefEntity {
relay_ref: TunnelRelayRef, relay_ref: TunnelRelayRef,
} }
struct ActivatedMembranes<'a, 'm> { struct ActivatedMembranes<'a, 'activation, 'm> {
turn: &'a mut Activation, turn: &'a mut Activation<'activation>,
tr_ref: &'m TunnelRelayRef, tr_ref: &'m TunnelRelayRef,
membranes: &'m mut Membranes, membranes: &'m mut Membranes,
} }
@ -174,46 +170,36 @@ impl Membrane {
ws ws
} }
fn remove(&mut self, ws: &Arc<WireSymbol>) {
self.oid_map.remove(&ws.oid);
self.ref_map.remove(&ws.obj);
}
fn insert_inert_entity(&mut self, t: &mut Activation, oid: sturdy::Oid) -> Arc<WireSymbol> { fn insert_inert_entity(&mut self, t: &mut Activation, oid: sturdy::Oid) -> Arc<WireSymbol> {
self.insert(oid, Cap::new(&t.inert_entity())) self.insert(oid, Cap::new(&t.inert_entity()))
} }
} }
pub fn connect_stream<I, O, Step, E, F>( pub fn connect_stream<I, O, E, F>(
t: &mut Activation, t: &mut Activation,
i: I, i: I,
o: O, o: O,
output_text: bool, output_text: bool,
step: Step, sturdyref: sturdy::SturdyRef,
initial_state: E, initial_state: E,
mut f: F, mut f: F,
) -> ActorResult where ) where
I: 'static + Send + AsyncRead, I: 'static + Send + AsyncRead,
O: 'static + Send + AsyncWrite, O: 'static + Send + AsyncWrite,
Step: for<'a> Unparse<&'a Language<AnyValue>, AnyValue>,
E: 'static + Send, E: 'static + Send,
F: 'static + Send + FnMut(&mut E, &mut Activation, Arc<Cap>) -> during::DuringResult<E> F: 'static + Send + FnMut(&mut E, &mut Activation, Arc<Cap>) -> during::DuringResult<E>
{ {
let i = Input::Bytes(Box::pin(i)); let i = Input::Bytes(Box::pin(i));
let o = Output::Bytes(Box::pin(o)); let o = Output::Bytes(Box::pin(o));
let gatekeeper = TunnelRelay::run(t, i, o, None, Some(sturdy::Oid(0.into())), output_text).unwrap(); let gatekeeper = TunnelRelay::run(t, i, o, None, Some(sturdy::Oid(0.into())), output_text).unwrap();
let main_entity = t.create(during::entity(initial_state).on_asserted(move |state, t, a: gatekeeper::Resolved| { let main_entity = t.create(during::entity(initial_state).on_asserted(move |state, t, a: AnyValue| {
match a { let denotation = a.value().to_embedded()?;
gatekeeper::Resolved::Accepted { responder_session } => f(state, t, responder_session), f(state, t, Arc::clone(denotation))
gatekeeper::Resolved::Rejected(r) => Err(error("Resolve rejected", r.detail))?,
}
})); }));
let step = language().parse::<gatekeeper::Step>(&language().unparse(&step))?; gatekeeper.assert(t, language(), &gatekeeper::Resolve {
gatekeeper.assert(t, language(), &gatekeeper::Resolve::<AnyValue> { sturdyref,
step, observer: Cap::new(&main_entity),
observer: Cap::guard(Language::arc(), main_entity),
}); });
Ok(())
} }
impl std::fmt::Debug for Membrane { impl std::fmt::Debug for Membrane {
@ -228,57 +214,7 @@ impl std::fmt::Debug for Membrane {
macro_rules! dump_membranes { ($e:expr) => { tracing::trace!("membranes: {:#?}", $e); } } macro_rules! dump_membranes { ($e:expr) => { tracing::trace!("membranes: {:#?}", $e); } }
// macro_rules! dump_membranes { ($e:expr) => { (); } } // macro_rules! dump_membranes { ($e:expr) => { (); } }
/// Main entry point for stdio-based Syndicate services.
pub async fn stdio_service<F>(f: F) -> !
where
F: 'static + Send + FnOnce(&mut Activation) -> Result<Arc<Cap>, ActorError>
{
let result = Actor::top(None, move |t| {
let service = f(t)?;
Ok(TunnelRelay::stdio_service(t, service))
}).await;
// Because we're currently using tokio::io::stdin(), which can prevent shutdown of the
// runtime, this routine uses std::process::exit directly as a special case. It's a
// stopgap: eventually, we'd like to do things Properly, as indicated in the comment
// attached (at the time of writing) to tokio::io::stdin(), which reads in part:
//
// This handle is best used for non-interactive uses, such as when a file
// is piped into the application. For technical reasons, `stdin` is
// implemented by using an ordinary blocking read on a separate thread, and
// it is impossible to cancel that read. This can make shutdown of the
// runtime hang until the user presses enter.
//
// For interactive uses, it is recommended to spawn a thread dedicated to
// user input and use blocking IO directly in that thread.
//
// TODO: Revisit this.
match result {
Ok(Ok(())) => {
std::process::exit(0);
}
Ok(Err(e)) => {
tracing::error!("Main stdio_service actor failed: {}", e);
std::process::exit(1);
},
Err(e) => {
tracing::error!("Join of main stdio_service actor failed: {}", e);
std::process::exit(2);
}
}
}
impl TunnelRelay { impl TunnelRelay {
pub fn stdio_service(t: &mut Activation, service: Arc<Cap>) -> () {
TunnelRelay::run(t,
Input::Bytes(Box::pin(tokio::io::stdin())),
Output::Bytes(Box::pin(tokio::io::stdout())),
Some(service),
None,
false);
}
pub fn run( pub fn run(
t: &mut Activation, t: &mut Activation,
i: Input, i: Input,
@ -291,7 +227,7 @@ impl TunnelRelay {
t.linked_task(Some(AnyValue::symbol("writer")), t.linked_task(Some(AnyValue::symbol("writer")),
output_loop(o, output_rx)); output_loop(o, output_rx));
t.linked_task(Some(AnyValue::symbol("reader")), t.linked_task(Some(AnyValue::symbol("reader")),
input_loop(t.trace_collector(), t.facet_ref(), i, tr_ref)); input_loop(t.trace_collector(), t.facet.clone(), i, tr_ref));
result result
} }
@ -316,7 +252,6 @@ impl TunnelRelay {
exported: Membrane::new(WireSymbolSide::Exported), exported: Membrane::new(WireSymbolSide::Exported),
imported: Membrane::new(WireSymbolSide::Imported), imported: Membrane::new(WireSymbolSide::Imported),
next_export_oid: 0, next_export_oid: 0,
reimported_attenuations: Map::new(),
}, },
pending_outbound: Vec::new(), pending_outbound: Vec::new(),
}; };
@ -327,7 +262,7 @@ impl TunnelRelay {
|io| Arc::clone(&tr.membranes.import_oid(t, &tr_ref, io).inc_ref().obj)); |io| Arc::clone(&tr.membranes.import_oid(t, &tr_ref, io).inc_ref().obj));
dump_membranes!(tr.membranes); dump_membranes!(tr.membranes);
*tr_ref.lock() = Some(tr); *tr_ref.lock() = Some(tr);
t.add_exit_hook(&self_entity); t.state.add_exit_hook(&self_entity);
(result, tr_ref, output_rx) (result, tr_ref, output_rx)
} }
@ -385,10 +320,6 @@ impl TunnelRelay {
tracing::info!(?label, ?fields, "received Extension from peer"); tracing::info!(?label, ?fields, "received Extension from peer");
Ok(()) Ok(())
} }
P::Packet::Nop(_b) => {
tracing::trace!("received Nop from peer");
Ok(())
}
P::Packet::Error(b) => { P::Packet::Error(b) => {
tracing::info!(message = ?b.message.clone(), tracing::info!(message = ?b.message.clone(),
detail = ?b.detail.clone(), detail = ?b.detail.clone(),
@ -427,19 +358,14 @@ impl TunnelRelay {
} }
P::Event::Retract(b) => { P::Event::Retract(b) => {
let P::Retract { handle: remote_handle } = *b; let P::Retract { handle: remote_handle } = *b;
match self.inbound_assertions.remove(&remote_handle) { let (local_handle, previous_pins) = match self.inbound_assertions.remove(&remote_handle) {
None => { None => return Err(error("Retraction of nonexistent handle", language().unparse(&remote_handle)))?,
// This can happen when e.g. an assertion previously made Some(wss) => wss,
// failed to pass an attenuation filter };
tracing::debug!(?remote_handle, "Retraction of nonexistent handle"); self.membranes.release(previous_pins);
} self.membranes.release(pins);
Some((local_handle, previous_pins)) => { t.retract(local_handle);
self.membranes.release(previous_pins); dump_membranes!(self.membranes);
self.membranes.release(pins);
t.retract(local_handle);
dump_membranes!(self.membranes);
}
}
} }
P::Event::Message(b) => { P::Event::Message(b) => {
let P::Message { body: P::Assertion(a) } = *b; let P::Message { body: P::Assertion(a) } = *b;
@ -556,7 +482,6 @@ impl TunnelRelay {
} else { } else {
PackedWriter::encode(&mut self.membranes, &item)? PackedWriter::encode(&mut self.membranes, &item)?
}; };
tracing::trace!(buffer = ?bs, "outbound bytes");
let _ = self.output.send(LoanedItem::new(account, cost, bs)); let _ = self.output.send(LoanedItem::new(account, cost, bs));
Ok(()) Ok(())
@ -617,10 +542,9 @@ impl Membranes {
#[inline] #[inline]
fn release_one(&mut self, ws: Arc<WireSymbol>) -> bool { fn release_one(&mut self, ws: Arc<WireSymbol>) -> bool {
if ws.dec_ref() { if ws.dec_ref() {
if let WireSymbolSide::Exported = ws.side { let membrane = self.membrane(ws.side);
self.reimported_attenuations.remove(&ws.oid); membrane.oid_map.remove(&ws.oid);
} membrane.ref_map.remove(&ws.obj);
self.membrane(ws.side).remove(&ws);
true true
} else { } else {
false false
@ -641,47 +565,38 @@ impl Membranes {
src: &'src mut S, src: &'src mut S,
_read_annotations: bool, _read_annotations: bool,
) -> io::Result<Arc<Cap>> { ) -> io::Result<Arc<Cap>> {
match sturdy::WireRef::deserialize(&mut src.packed(NoEmbeddedDomainCodec))? { let ws = match sturdy::WireRef::deserialize(&mut src.packed(NoEmbeddedDomainCodec))? {
sturdy::WireRef::Mine{ oid: b } => { sturdy::WireRef::Mine{ oid: b } => {
let oid = *b; let oid = *b;
let ws = self.imported.oid_map.get(&oid).map(Arc::clone) self.imported.oid_map.get(&oid).map(Arc::clone)
.unwrap_or_else(|| self.import_oid(t, relay_ref, oid)); .unwrap_or_else(|| self.import_oid(t, relay_ref, oid))
Ok(Arc::clone(&ws.inc_ref().obj))
} }
sturdy::WireRef::Yours { oid: b, attenuation } => { sturdy::WireRef::Yours { oid: b, attenuation } => {
let oid = *b; let oid = *b;
let ws = self.exported.oid_map.get(&oid).map(Arc::clone)
.unwrap_or_else(|| self.exported.insert_inert_entity(t, oid.clone()));
if attenuation.is_empty() { if attenuation.is_empty() {
Ok(Arc::clone(&ws.inc_ref().obj)) self.exported.oid_map.get(&oid).map(Arc::clone).unwrap_or_else(
|| self.exported.insert_inert_entity(t, oid))
} else { } else {
let attenuated_obj = ws.obj.attenuate(&attenuation) match self.exported.oid_map.get(&oid) {
.map_err(|e| { None => self.exported.insert_inert_entity(t, oid),
io::Error::new( Some(ws) => {
io::ErrorKind::InvalidInput, let attenuated_obj = ws.obj.attenuate(&attenuation)
format!("Invalid capability attenuation: {:?}", e)) .map_err(|e| {
})?; io::Error::new(
io::ErrorKind::InvalidInput,
ws.inc_ref(); format!("Invalid capability attenuation: {:?}", e))
})?;
let variations = self.reimported_attenuations.entry(oid).or_default(); self.exported.insert(oid, attenuated_obj)
match variations.get(&attenuated_obj) {
None => {
variations.insert(Arc::clone(&attenuated_obj));
self.exported.ref_map.insert(Arc::clone(&attenuated_obj), Arc::clone(&ws));
Ok(attenuated_obj)
} }
Some(existing) =>
Ok(Arc::clone(existing))
} }
} }
} }
} };
Ok(Arc::clone(&ws.inc_ref().obj))
} }
} }
impl<'a, 'm> DomainDecode<Arc<Cap>> for ActivatedMembranes<'a, 'm> { impl<'a, 'activation, 'm> DomainDecode<Arc<Cap>> for ActivatedMembranes<'a, 'activation, 'm> {
fn decode_embedded<'de, 'src, S: BinarySource<'de>>( fn decode_embedded<'de, 'src, S: BinarySource<'de>>(
&mut self, &mut self,
src: &'src mut S, src: &'src mut S,
@ -762,12 +677,12 @@ async fn input_loop(
buf.reserve(BUFSIZE); buf.reserve(BUFSIZE);
let n = match r.read_buf(&mut buf).await { let n = match r.read_buf(&mut buf).await {
Ok(n) => n, Ok(n) => n,
Err(e) => { Err(e) =>
if e.kind() == io::ErrorKind::ConnectionReset { if e.kind() == io::ErrorKind::ConnectionReset {
break; break;
} } else {
return Err(e)?; return Err(e)?;
} },
}; };
match n { match n {
0 => break, 0 => break,
@ -812,7 +727,7 @@ async fn output_loop(
impl Entity<()> for TunnelRefEntity { impl Entity<()> for TunnelRefEntity {
fn exit_hook(&mut self, t: &mut Activation, exit_status: &Arc<ExitStatus>) { fn exit_hook(&mut self, t: &mut Activation, exit_status: &Arc<ExitStatus>) {
if let ExitStatus::Error(e) = &**exit_status { if let Err(e) = &**exit_status {
let e = e.clone(); let e = e.clone();
let mut g = self.relay_ref.lock(); let mut g = self.relay_ref.lock();
let tr = g.as_mut().expect("initialized"); let tr = g.as_mut().expect("initialized");

View File

@ -121,6 +121,7 @@ impl Pattern {
Pattern::PDiscard(_) => true, Pattern::PDiscard(_) => true,
Pattern::PAtom(b) => match &**b { Pattern::PAtom(b) => match &**b {
PAtom::Boolean => a.value().is_boolean(), PAtom::Boolean => a.value().is_boolean(),
PAtom::Float => a.value().is_float(),
PAtom::Double => a.value().is_double(), PAtom::Double => a.value().is_double(),
PAtom::SignedInteger => a.value().is_signedinteger(), PAtom::SignedInteger => a.value().is_signedinteger(),
PAtom::String => a.value().is_string(), PAtom::String => a.value().is_string(),

View File

@ -1,17 +0,0 @@
use preserves_schema::support::Unparse;
use crate::actor::AnyValue;
use crate::schemas::rpc as R;
pub fn question<L, Q: Unparse<L, AnyValue>>(literals: L, request: Q) -> R::Question {
R::Question {
request: request.unparse(literals),
}
}
pub fn answer<'a, L, Q: Unparse<&'a L, AnyValue>, A: Unparse<&'a L, AnyValue>>(literals: &'a L, request: Q, response: A) -> R::Answer {
R::Answer {
request: request.unparse(literals),
response: response.unparse(literals),
}
}

View File

@ -16,12 +16,19 @@ use crate::actor::Activation;
use crate::actor::Handle; use crate::actor::Handle;
use crate::actor::Cap; use crate::actor::Cap;
use crate::schemas::dataspace_patterns as ds; use crate::schemas::dataspace_patterns as ds;
use crate::pattern::{self, ConstantPositions, PathStep, Path, Paths}; use crate::pattern::{self, PathStep, Path, Paths};
type Bag<A> = bag::BTreeBag<A>; type Bag<A> = bag::BTreeBag<A>;
type Captures = AnyValue; type Captures = AnyValue;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
enum Guard {
Rec(AnyValue, usize),
Seq(usize),
Map,
}
/// Index of assertions and [`Observe`rs][crate::schemas::dataspace::Observe]. /// Index of assertions and [`Observe`rs][crate::schemas::dataspace::Observe].
/// ///
/// Generally speaking, you will not need to use this structure; /// Generally speaking, you will not need to use this structure;
@ -37,13 +44,13 @@ pub struct Index {
#[derive(Debug)] #[derive(Debug)]
struct Node { struct Node {
continuation: Continuation, continuation: Continuation,
edges: Map<Selector, Map<ds::GroupType, Node>>, edges: Map<Selector, Map<Guard, Node>>,
} }
#[derive(Debug)] #[derive(Debug)]
struct Continuation { struct Continuation {
cached_assertions: Set<AnyValue>, cached_assertions: Set<AnyValue>,
leaf_map: Map<Arc<ConstantPositions>, Map<Captures, Leaf>>, leaf_map: Map<Paths, Map<Captures, Leaf>>,
} }
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
@ -198,7 +205,7 @@ impl Node {
} }
fn extend(&mut self, pat: &ds::Pattern) -> &mut Continuation { fn extend(&mut self, pat: &ds::Pattern) -> &mut Continuation {
let (_pop_count, final_node) = self.extend_walk(&mut Vec::new(), 0, PathStep::new(0), pat); let (_pop_count, final_node) = self.extend_walk(&mut Vec::new(), 0, PathStep::Index(0), pat);
&mut final_node.continuation &mut final_node.continuation
} }
@ -209,13 +216,23 @@ impl Node {
step: PathStep, step: PathStep,
pat: &ds::Pattern, pat: &ds::Pattern,
) -> (usize, &mut Node) { ) -> (usize, &mut Node) {
let (guard, members): (ds::GroupType, Vec<(PathStep, &ds::Pattern)>) = match pat { let (guard, members): (Guard, Vec<(PathStep, &ds::Pattern)>) = match pat {
ds::Pattern::Group { type_, entries } => ds::Pattern::DCompound(b) => match &**b {
((&**type_).clone(), ds::DCompound::Arr { items } =>
entries.iter().map(|(k, p)| (k.clone(), p)).collect()), (Guard::Seq(items.len()),
ds::Pattern::Bind { pattern } => items.iter().enumerate().map(|(i, p)| (PathStep::Index(i), p)).collect()),
return self.extend_walk(path, pop_count, step, &**pattern), ds::DCompound::Rec { label, fields } =>
ds::Pattern::Discard | ds::Pattern::Lit { .. } => (Guard::Rec(label.clone(), fields.len()),
fields.iter().enumerate().map(|(i, p)| (PathStep::Index(i), p)).collect()),
ds::DCompound::Dict { entries, .. } =>
(Guard::Map,
entries.iter().map(|(k, p)| (PathStep::Key(k.clone()), p)).collect()),
}
ds::Pattern::DBind(b) => {
let ds::DBind { pattern, .. } = &**b;
return self.extend_walk(path, pop_count, step, pattern);
}
ds::Pattern::DDiscard(_) | ds::Pattern::DLit(_) =>
return (pop_count, self), return (pop_count, self),
}; };
@ -319,46 +336,41 @@ where FCont: FnMut(&mut Continuation, &AnyValue) -> (),
fn continuation(&mut self, c: &mut Continuation) { fn continuation(&mut self, c: &mut Continuation) {
(self.m_cont)(c, self.outer_value); (self.m_cont)(c, self.outer_value);
let mut empty_const_positions = Vec::new(); let mut empty_const_paths = Vec::new();
for (const_positions, const_val_map) in &mut c.leaf_map { for (const_paths, const_val_map) in &mut c.leaf_map {
if project_paths(self.outer_value, &const_positions.required_to_exist).is_none() { if let Some(const_vals) = project_paths(self.outer_value, const_paths) {
continue; let leaf_opt = if self.create_leaf_if_absent {
} Some(const_val_map.entry(const_vals.clone()).or_insert_with(Leaf::new))
let const_vals = match project_paths(self.outer_value, &const_positions.with_values) { } else {
Some(vs) => vs, const_val_map.get_mut(&const_vals)
None => continue, };
}; if let Some(leaf) = leaf_opt {
let leaf_opt = if self.create_leaf_if_absent { (self.m_leaf)(leaf, self.outer_value);
Some(const_val_map.entry(const_vals.clone()).or_insert_with(Leaf::new)) for (capture_paths, endpoints) in &mut leaf.endpoints_map {
} else { if let Some(cs) = project_paths(self.outer_value, &capture_paths) {
const_val_map.get_mut(&const_vals) (self.m_endpoints)(endpoints, cs);
}; }
if let Some(leaf) = leaf_opt {
(self.m_leaf)(leaf, self.outer_value);
for (capture_paths, endpoints) in &mut leaf.endpoints_map {
if let Some(cs) = project_paths(self.outer_value, &capture_paths) {
(self.m_endpoints)(endpoints, cs);
} }
} if leaf.is_empty() {
if leaf.is_empty() { const_val_map.remove(&const_vals);
const_val_map.remove(&const_vals); if const_val_map.is_empty() {
if const_val_map.is_empty() { empty_const_paths.push(const_paths.clone());
empty_const_positions.push(const_positions.clone()); }
} }
} }
} }
} }
for const_positions in empty_const_positions { for const_paths in empty_const_paths {
c.leaf_map.remove(&const_positions); c.leaf_map.remove(&const_paths);
} }
} }
} }
fn class_of(v: &AnyValue) -> Option<ds::GroupType> { fn class_of(v: &AnyValue) -> Option<Guard> {
match v.value() { match v.value() {
Value::Sequence(_) => Some(ds::GroupType::Arr), Value::Sequence(vs) => Some(Guard::Seq(vs.len())),
Value::Record(r) => Some(ds::GroupType::Rec { label: r.label().clone() }), Value::Record(r) => Some(Guard::Rec(r.label().clone(), r.arity())),
Value::Dictionary(_) => Some(ds::GroupType::Dict), Value::Dictionary(_) => Some(Guard::Map),
_ => None, _ => None,
} }
} }
@@ -386,17 +398,15 @@ fn project_paths<'a>(v: &'a AnyValue, ps: &Paths) -> Option<Captures> {
 }
 
 fn step<'a>(v: &'a AnyValue, s: &PathStep) -> Option<&'a AnyValue> {
-    match v.value() {
-        Value::Sequence(vs) => {
-            let i = s.value().as_usize()?;
-            if i < vs.len() { Some(&vs[i]) } else { None }
-        }
-        Value::Record(r) => {
-            let i = s.value().as_usize()?;
-            if i < r.arity() { Some(&r.fields()[i]) } else { None }
-        }
-        Value::Dictionary(m) => m.get(s),
-        _ => None,
+    match (v.value(), s) {
+        (Value::Sequence(vs), PathStep::Index(i)) =>
+            if *i < vs.len() { Some(&vs[*i]) } else { None },
+        (Value::Record(r), PathStep::Index(i)) =>
+            if *i < r.arity() { Some(&r.fields()[*i]) } else { None },
+        (Value::Dictionary(m), PathStep::Key(k)) =>
+            m.get(k),
+        _ =>
+            None,
     }
 }
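
The rewritten `step` above dispatches on the pair of value shape and path step: integer steps index into sequences and record fields, key steps look up dictionary entries, and any other combination fails the projection. The following sketch restates that idea over a simplified value type; `Val`, `project`, and the example assertion are invented stand-ins, not the preserves `AnyValue`/`Value` API that the real code matches via `v.value()`.

// Sketch only: a minimal structured value and the Index/Key projection over it.
use std::collections::BTreeMap;

#[allow(dead_code)] // not every variant/field is exercised by the tiny example below
#[derive(Debug, Clone, PartialEq)]
enum Val {
    Atom(String),
    Seq(Vec<Val>),
    Rec { label: String, fields: Vec<Val> },
    Dict(BTreeMap<String, Val>),
}

#[derive(Debug, Clone)]
enum PathStep {
    Index(usize), // into a sequence or a record's fields
    Key(String),  // into a dictionary
}

// One projection step; None when the step does not apply to the value's shape.
fn step<'a>(v: &'a Val, s: &PathStep) -> Option<&'a Val> {
    match (v, s) {
        (Val::Seq(vs), PathStep::Index(i)) => vs.get(*i),
        (Val::Rec { fields, .. }, PathStep::Index(i)) => fields.get(*i),
        (Val::Dict(m), PathStep::Key(k)) => m.get(k),
        _ => None,
    }
}

// A path is a sequence of steps applied left to right.
fn project<'a>(mut v: &'a Val, path: &[PathStep]) -> Option<&'a Val> {
    for s in path {
        v = step(v, s)?;
    }
    Some(v)
}

fn main() {
    // Roughly <present "book" {"title": "Dune"}>, modelled with the stand-in types.
    let assertion = Val::Rec {
        label: "present".into(),
        fields: vec![
            Val::Atom("book".into()),
            Val::Dict(BTreeMap::from([("title".into(), Val::Atom("Dune".into()))])),
        ],
    };
    let path = [PathStep::Index(1), PathStep::Key("title".into())];
    assert_eq!(project(&assertion, &path), Some(&Val::Atom("Dune".into())));
}
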
@@ -413,14 +423,11 @@ impl Continuation {
     ) {
         let cached_assertions = &self.cached_assertions;
         let const_val_map =
-            self.leaf_map.entry(analysis.const_positions.clone()).or_insert_with({
+            self.leaf_map.entry(analysis.const_paths.clone()).or_insert_with({
                 || {
                     let mut cvm = Map::new();
                     for a in cached_assertions {
-                        if project_paths(a, &analysis.const_positions.required_to_exist).is_none() {
-                            continue;
-                        }
-                        if let Some(key) = project_paths(a, &analysis.const_positions.with_values) {
+                        if let Some(key) = project_paths(a, &analysis.const_paths) {
                             cvm.entry(key).or_insert_with(Leaf::new)
                                 .cached_assertions.insert(a.clone());
                         }
@@ -455,7 +462,7 @@ impl Continuation {
         observer: &Arc<Cap>,
     ) {
         if let Entry::Occupied(mut const_val_map_entry)
-            = self.leaf_map.entry(analysis.const_positions)
+            = self.leaf_map.entry(analysis.const_paths)
         {
             let const_val_map = const_val_map_entry.get_mut();
             if let Entry::Occupied(mut leaf_entry)

View File

@@ -2,7 +2,6 @@ use blake2::Blake2s256;
 use getrandom::getrandom;
 use hmac::{SimpleHmac, Mac};
 
-use preserves::error::io_syntax_error;
 use preserves::hex::HexParser;
 use preserves::hex::HexFormatter;
 use preserves::value::NestedValue;
@@ -22,7 +21,6 @@ pub use super::schemas::sturdy::*;
 pub enum ValidationError {
     SignatureError,
     AttenuationError(CaveatError),
-    BadCaveatsField,
 }
 
 impl std::fmt::Display for ValidationError {
@@ -32,18 +30,10 @@ impl std::fmt::Display for ValidationError {
                 write!(f, "Invalid SturdyRef signature"),
             ValidationError::AttenuationError(e) =>
                 write!(f, "Invalid SturdyRef attenuation: {:?}", e),
-            ValidationError::BadCaveatsField =>
-                write!(f, "Invalid caveats field in SturdyRef parameters"),
         }
     }
 }
 
-impl From<ValidationError> for io::Error {
-    fn from(v: ValidationError) -> Self {
-        io_syntax_error(&v.to_string())
-    }
-}
-
 impl std::error::Error for ValidationError {}
 
 const KEY_LENGTH: usize = 16; // bytes; 128 bits
@@ -81,21 +71,7 @@ pub fn decode<N: NestedValue>(bs: &[u8]) -> io::Result<N> {
 impl SturdyRef {
     pub fn mint(oid: _Any, key: &[u8]) -> Self {
         let sig = signature(key, &encode(&oid));
-        SturdyRef::from_parts(oid, vec![], sig)
-    }
-
-    pub fn from_parts(oid: _Any, caveats: Vec<Caveat>, sig: Vec<u8>) -> Self {
-        SturdyRef {
-            parameters: Parameters {
-                oid,
-                sig,
-                caveats: if caveats.is_empty() {
-                    CaveatsField::Absent
-                } else {
-                    CaveatsField::Present { caveats }
-                }
-            }
-        }
+        SturdyRef { oid, caveat_chain: Vec::new(), sig }
     }
 
     pub fn from_hex(s: &str) -> Result<Self, Error> {
@@ -107,14 +83,6 @@ impl SturdyRef {
         HexFormatter::Packed.encode(&encode(&language().unparse(self)))
     }
 
-    pub fn caveat_chain(&self) -> Result<&[Caveat], ValidationError> {
-        match &self.parameters.caveats {
-            CaveatsField::Absent => Ok(&[]),
-            CaveatsField::Invalid { .. } => Err(ValidationError::BadCaveatsField),
-            CaveatsField::Present { caveats } => Ok(caveats),
-        }
-    }
-
     pub fn validate_and_attenuate(
         &self,
         key: &[u8],
@@ -122,15 +90,14 @@ impl SturdyRef {
     ) -> Result<_Ptr, ValidationError> {
         self.validate(key).map_err(|_| ValidationError::SignatureError)?;
         let target = unattenuated_target
-            .attenuate(self.caveat_chain()?)
+            .attenuate(&self.caveat_chain)
             .map_err(ValidationError::AttenuationError)?;
         Ok(target)
     }
 
     pub fn validate(&self, key: &[u8]) -> Result<(), ()> {
-        let SturdyRef { parameters: Parameters { oid, sig, .. } } = self;
-        let key = chain_signature(&signature(&key, &encode(oid)),
-                                  self.caveat_chain().map_err(|_| ())?);
+        let SturdyRef { oid, caveat_chain, sig } = self;
+        let key = chain_signature(&signature(&key, &encode(oid)), caveat_chain);
         if &key == sig {
             Ok(())
         } else {
@@ -138,13 +105,13 @@ impl SturdyRef {
         }
     }
 
-    pub fn attenuate(&self, attenuation: &[Caveat]) -> Result<Self, ValidationError> {
-        Caveat::validate_many(attenuation).map_err(ValidationError::AttenuationError)?;
-        let SturdyRef { parameters: Parameters { oid, sig, .. } } = self;
+    pub fn attenuate(&self, attenuation: &[Caveat]) -> Result<Self, CaveatError> {
+        Caveat::validate_many(attenuation)?;
+        let SturdyRef { oid, caveat_chain, sig } = self;
         let oid = oid.clone();
-        let mut caveat_chain = self.caveat_chain()?.to_vec();
+        let mut caveat_chain = caveat_chain.clone();
         caveat_chain.extend(attenuation.iter().cloned());
         let sig = chain_signature(&sig, attenuation);
-        Ok(SturdyRef::from_parts(oid, caveat_chain, sig))
+        Ok(SturdyRef { oid, caveat_chain, sig })
     }
 }
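
All of the SturdyRef hunks above orbit one construction: a ref's signature is an HMAC chain seeded from the minting key, with one extra link per caveat, so validation recomputes the chain and attenuation extends it. The sketch below restates that scheme (it requires the `hmac` and `blake2` crates). It borrows only the `SimpleHmac`/`Blake2s256` choice visible in the imports; the byte-string caveats, the example key and oid, and the driver are invented, and the real code signs Preserves-encoded values rather than raw literals (key-size and truncation details are elided here).

// Sketch only: chained HMAC signatures for mint / attenuate / validate.
use blake2::Blake2s256;
use hmac::{Mac, SimpleHmac};

fn signature(key: &[u8], data: &[u8]) -> Vec<u8> {
    let mut m = SimpleHmac::<Blake2s256>::new_from_slice(key)
        .expect("HMAC accepts keys of any length");
    m.update(data);
    m.finalize().into_bytes().to_vec()
}

// Each caveat folds into the running signature: sig_{n+1} = HMAC(sig_n, caveat_n).
fn chain_signature(seed: &[u8], caveats: &[Vec<u8>]) -> Vec<u8> {
    caveats.iter().fold(seed.to_vec(), |sig, c| signature(&sig, c))
}

fn main() {
    let secret_key = b"0123456789abcdef"; // stand-in for the server's secret
    let oid = b"\"syndicate\"";           // stand-in for encode(&oid)

    // Minting: the unattenuated ref carries HMAC(secret_key, oid).
    let base_sig = signature(secret_key, oid);

    // Attenuation appends caveats and re-chains; a holder of only the attenuated
    // ref cannot recover base_sig, so the caveats cannot be stripped off.
    let caveats = vec![b"only-assert-Says".to_vec()];
    let attenuated_sig = chain_signature(&base_sig, &caveats);

    // Validation recomputes the whole chain from the secret key and compares.
    let recomputed = chain_signature(&signature(secret_key, oid), &caveats);
    assert_eq!(recomputed, attenuated_sig);
    println!("attenuated sig: {:02x?}", attenuated_sig);
}
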

Some files were not shown because too many files have changed in this diff.