Compare commits
269 Commits
6cd5887b4c
...
trunk
Author | SHA1 | Date |
---|---|---|
Emery Hemingway | 317167ea49 | |
Emery Hemingway | 7ab4611824 | |
Emery Hemingway | 51ec8df124 | |
Emery Hemingway | 7d7d182868 | |
Emery Hemingway | 8f35a1256c | |
Emery Hemingway | 3a4dc1f133 | |
Emery Hemingway | 4fb0285190 | |
Emery Hemingway | 81792ac4ce | |
Emery Hemingway | 7e15b37f44 | |
Emery Hemingway | c99f0a60ab | |
Emery Hemingway | 5fc371d187 | |
Emery Hemingway | 06898e4ec1 | |
Emery Hemingway | 2aaa588f6a | |
Emery Hemingway | e0b569e465 | |
Emery Hemingway | 13d3995507 | |
Emery Hemingway | 6487ef65d0 | |
Emery Hemingway | 6a4854110c | |
Emery Hemingway | 464043c8bf | |
Emery Hemingway | 15637620f0 | |
Emery Hemingway | c2dce8a274 | |
Emery Hemingway | 1a3fdf2a5a | |
Emery Hemingway | c2e1e2e0fa | |
Emery Hemingway | 5f45f76452 | |
Emery Hemingway | 403e54878c | |
Emery Hemingway | 7c72ea5732 | |
Emery Hemingway | 2aee79662e | |
Emery Hemingway | 8b79dce5ba | |
Emery Hemingway | 6bcf039dc2 | |
Emery Hemingway | 217a6aacf3 | |
Emery Hemingway | aea9a2e4e6 | |
Emery Hemingway | 78d7efc712 | |
Emery Hemingway | 289754499c | |
Emery Hemingway | 4fe2173d81 | |
Emery Hemingway | 87e730bc5b | |
Emery Hemingway | 399fd4a30c | |
Emery Hemingway | bf8f7e9aaa | |
Emery Hemingway | d654195fb8 | |
Emery Hemingway | 76acf2cb67 | |
Emery Hemingway | 50b00827ce | |
Emery Hemingway | 81ce71d495 | |
Emery Hemingway | a3146f88a5 | |
Emery Hemingway | 9ca073d433 | |
Emery Hemingway | cd846d0d46 | |
Emery Hemingway | 82f2e8ee98 | |
Emery Hemingway | 1592fac3b1 | |
Emery Hemingway | 8ef95c0e1d | |
Emery Hemingway | a014362292 | |
Emery Hemingway | b8c1bec9cf | |
Emery Hemingway | cf395dbfa4 | |
Emery Hemingway | 9d975bab56 | |
Emery Hemingway | 15d2e8bfb4 | |
Emery Hemingway | eb5d4d9a57 | |
Emery Hemingway | 01f26caf7b | |
Emery Hemingway | e31069e41a | |
Emery Hemingway | fdf2994ec4 | |
Emery Hemingway | 0cee6670c9 | |
Emery Hemingway | 1ce96560f4 | |
Emery Hemingway | d365a1e6e5 | |
Emery Hemingway | 3e5d910d1a | |
Emery Hemingway | 50a77995bc | |
Emery Hemingway | 3e324e2de4 | |
Emery Hemingway | aadf7a3dc7 | |
Emery Hemingway | a2849b18c9 | |
Emery Hemingway | f3d6e578cb | |
Emery Hemingway | 39cb3790df | |
Emery Hemingway | 46dca031fa | |
Emery Hemingway | 79c621e979 | |
Emery Hemingway | a05cfa37eb | |
Emery Hemingway | d6a8b31812 | |
Emery Hemingway | a4ba81a481 | |
Emery Hemingway | 75d1e33bff | |
Emery Hemingway | 6b642645f9 | |
Emery Hemingway | 0e5637a6c3 | |
Emery Hemingway | 3e11884a91 | |
Emery Hemingway | 7721138bf4 | |
Emery Hemingway | 59ece65f3b | |
Emery Hemingway | 6d2a401a2b | |
Emery Hemingway | 19121e514c | |
Emery Hemingway | df52f72263 | |
Emery Hemingway | e48c62f448 | |
Emery Hemingway | 3cc3a48c82 | |
Emery Hemingway | c1c5333778 | |
Emery Hemingway | 1e107131d8 | |
Emery Hemingway | a0355637d8 | |
Emery Hemingway | c0cff79313 | |
Emery Hemingway | b5aa2a7b8f | |
Emery Hemingway | 8f6da89d69 | |
Emery Hemingway | 0954180321 | |
Emery Hemingway | 97d24aa971 | |
Emery Hemingway | 8a7cc884fe | |
Emery Hemingway | 23c69f63a5 | |
Emery Hemingway | 8bc0ee2ae5 | |
Emery Hemingway | 090b4d77ef | |
Emery Hemingway | 00609f3b6f | |
Emery Hemingway | 577490701a | |
Emery Hemingway | 843252ad61 | |
Emery Hemingway | ac2576f005 | |
Emery Hemingway | 311b614979 | |
Emery Hemingway | 5b373e3047 | |
Emery Hemingway | ec8e166099 | |
Emery Hemingway | a987f875a9 | |
Emery Hemingway | 57b99b20e7 | |
Emery Hemingway | 4a6e95bbce | |
Emery Hemingway | 3a04fc195b | |
Emery Hemingway | 6fcb76b1e9 | |
Emery Hemingway | 552e51899c | |
Emery Hemingway | b2994b6d05 | |
Emery Hemingway | d86ef24c01 | |
Emery Hemingway | dcd6bfe99b | |
Emery Hemingway | 35670b2727 | |
Emery Hemingway | 73d29da071 | |
Emery Hemingway | 66f435a279 | |
Emery Hemingway | 703bd7baea | |
Emery Hemingway | ce6d97c1d3 | |
Emery Hemingway | f78308765e | |
Emery Hemingway | ba2ea5d08b | |
Emery Hemingway | 9c5e26e8f1 | |
Emery Hemingway | 8fc9608199 | |
Emery Hemingway | ce8e800187 | |
Emery Hemingway | 4e0a36ef31 | |
Emery Hemingway | 16cc5aaf98 | |
Emery Hemingway | 7b2d59e4cd | |
Emery Hemingway | 7f903a14d7 | |
Emery Hemingway | 4b29fc009b | |
Emery Hemingway | 248d34ce69 | |
Emery Hemingway | 146b30ed42 | |
Emery Hemingway | ca12c1ae03 | |
Emery Hemingway | 9614955320 | |
Emery Hemingway | 7fec2d61ac | |
Emery Hemingway | 76d550602f | |
Emery Hemingway | 91a218f7fb | |
Emery Hemingway | b1b0477b8a | |
Emery Hemingway | 170f49693c | |
Emery Hemingway | 219286a84a | |
Emery Hemingway | 8bb9fb16d7 | |
Emery Hemingway | fc94fa39d8 | |
Emery Hemingway | 2f4552e7fe | |
Emery Hemingway | 0089e1f413 | |
Emery Hemingway | 7a36a6e8a4 | |
Emery Hemingway | 209ae51580 | |
Emery Hemingway | aff3061506 | |
Emery Hemingway | ee3fc7adea | |
Emery Hemingway | 1dd197f102 | |
Emery Hemingway | e65cc5ab9a | |
Emery Hemingway | 81ec3808a6 | |
Emery Hemingway | ad076bdfed | |
Emery Hemingway | b39302791e | |
Emery Hemingway | 737869d790 | |
Emery Hemingway | f1cc7b336b | |
Emery Hemingway | f0328b27cd | |
Emery Hemingway | 94fa1efd62 | |
Emery Hemingway | 0923b8abee | |
Emery Hemingway | 74254dd45b | |
Emery Hemingway | 572e3b76ab | |
Emery Hemingway | 003bfa0a97 | |
Emery Hemingway | 90247e19ce | |
Emery Hemingway | cf05845f15 | |
Emery Hemingway | 6364db7f69 | |
Emery Hemingway | b1ad08e693 | |
Emery Hemingway | 5170ad23e8 | |
Emery Hemingway | 53278d8614 | |
Emery Hemingway | ac81221faa | |
Emery Hemingway | b1627a491a | |
Emery Hemingway | 71955f257d | |
Emery Hemingway | 6f2adf573a | |
Emery Hemingway | 114088bfe6 | |
Emery Hemingway | 32d8719c84 | |
Emery Hemingway | 00269c04b1 | |
Emery Hemingway | a5c2d30ec4 | |
Emery Hemingway | 589b0772e3 | |
Emery Hemingway | 47da042671 | |
Emery Hemingway | bb4ba36ff7 | |
Emery Hemingway | 108d51c8ed | |
Emery Hemingway | 0fab80bcf7 | |
Emery Hemingway | 1234f97f20 | |
Emery Hemingway | a43a723bb1 | |
Emery Hemingway | 49b58f5ce1 | |
Emery Hemingway | e24b06d317 | |
Emery Hemingway | c4dace1eb7 | |
Emery Hemingway | 17c7328be8 | |
Emery Hemingway | 947e560fb7 | |
Emery Hemingway | e26c718142 | |
Emery Hemingway | 2aeb20e959 | |
Emery Hemingway | 26d88d7208 | |
Emery Hemingway | 0039792e0b | |
Emery Hemingway | 1a6bb4ffbd | |
Emery Hemingway | d9a3570d6f | |
Emery Hemingway | ec60d9c64a | |
Emery Hemingway | d69af0a90d | |
Emery Hemingway | 68a742797c | |
Emery Hemingway | fa5a4a9cbc | |
Emery Hemingway | 4e424e7ca8 | |
Emery Hemingway | 3fa7d4225e | |
Emery Hemingway | 535fb93df3 | |
Emery Hemingway | 96d3bbb500 | |
Emery Hemingway | 9b0437e922 | |
Emery Hemingway | a8e8eed619 | |
Emery Hemingway | 0742665288 | |
Emery Hemingway | b0f5ff98e2 | |
Emery Hemingway | 831b7194a9 | |
Emery Hemingway | 6701fdb1c7 | |
Emery Hemingway | ef1290d105 | |
Emery Hemingway | 6647aeb83b | |
Emery Hemingway | 6281f5467f | |
Emery Hemingway | 7f0277fe85 | |
Emery Hemingway | 57e4bb6bad | |
Emery Hemingway | 07cd833e3d | |
Emery Hemingway | 7ca28768d4 | |
Emery Hemingway | be22238ca3 | |
Emery Hemingway | 68432a204c | |
Emery Hemingway | b4fd18ffcd | |
Emery Hemingway | 5d78270bb5 | |
Emery Hemingway | 9048506981 | |
Emery Hemingway | 9648884997 | |
Emery Hemingway | fd47039ca3 | |
Emery Hemingway | 143febc215 | |
Emery Hemingway | a437183863 | |
Emery Hemingway | 4454d19b60 | |
Emery Hemingway | 4027da4c5f | |
Emery Hemingway | 76c9a6377d | |
Emery Hemingway | fac3002fc8 | |
Emery Hemingway | 1171b238ec | |
Emery Hemingway | 405f3dd5c2 | |
Emery Hemingway | 829c0bf61a | |
Emery Hemingway | 697ef56a27 | |
Emery Hemingway | 3d04ecd2c8 | |
Emery Hemingway | 13fd96420c | |
Emery Hemingway | 5ed70badad | |
Emery Hemingway | 67fa320db6 | |
Emery Hemingway | 56431ee37b | |
Emery Hemingway | ed1fd2d6ef | |
Emery Hemingway | cec49c33c4 | |
Emery Hemingway | aca382e178 | |
Emery Hemingway | 231928f243 | |
Emery Hemingway | d4442438fa | |
Emery Hemingway | a101a0ecb3 | |
Emery Hemingway | 1912574ed8 | |
Emery Hemingway | d8f6d82956 | |
Emery Hemingway | 0bf6b684fe | |
Emery Hemingway | d1e196f6f0 | |
Emery Hemingway | d18508371f | |
Emery Hemingway | ef1d0cc3fa | |
Emery Hemingway | 83113aea6f | |
Emery Hemingway | ec15716298 | |
Emery Hemingway | f710419ead | |
Emery Hemingway | ae04bc1019 | |
Emery Hemingway | a8030cce4d | |
Emery Hemingway | 9c2f58ad39 | |
Emery Hemingway | 852306ca04 | |
Emery Hemingway | 7233cce36b | |
Emery Hemingway | dcb5462c86 | |
Emery Hemingway | 01469f1b42 | |
Emery Hemingway | d6e7add10a | |
Emery Hemingway | 123fb79bbe | |
Emery Hemingway | babc58526c | |
Emery Hemingway | 515ef22805 | |
Emery Hemingway | dc420c1a22 | |
Emery Hemingway | 21bdaeb26b | |
Emery Hemingway | 55fcbb0754 | |
Emery Hemingway | 166152cd84 | |
Emery Hemingway | 272b6dfcb7 | |
Emery Hemingway | 5fe1b7a70d | |
Emery Hemingway | 29b43eaced | |
Emery Hemingway | 3b9bbdf0fe | |
Emery Hemingway | 8cf7beeb0d | |
Emery Hemingway | 3f552edde7 | |
Emery Hemingway | 77a9ea380f | |
Emery Hemingway | 8447822243 | |
Emery Hemingway | 3509573f55 |
|
@ -1,3 +1,3 @@
|
|||
tests/test_box_and_client
|
||||
tests/test_dsl
|
||||
tests/test_timer
|
||||
/nim.cfg
|
||||
*.check
|
||||
*.run
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
[submodule "preserves"]
|
||||
path = preserves
|
||||
url = ./preserves
|
110
README.md
110
README.md
|
@ -1,9 +1,105 @@
|
|||
# Syndicate
|
||||
# Syndicated Actors for Nim
|
||||
|
||||
Nim implementation of [Syndicate](https://syndicate-lang.org/) dataspaces and actors.
|
||||
> The [Syndicated Actor model](https://syndicate-lang.org/about/) is a new model of concurrency, closely related to the actor, tuplespace, and publish/subscribe models.
|
||||
|
||||
## TODO
|
||||
* Complete Syndicate DSL
|
||||
* Timer driver
|
||||
* Remote dataspaces
|
||||
* Async-dispatch integration
|
||||
The Syndicate library provides a code-generator and DSL for writing idiomatic Nim within the execution flow of the Syndicated Actor Model. The library allows for structured conversations with other programs and other languages. It can be used to implement distributed systems but also makes it possible to create dynamically reconfigurable programs using a process controller and a configuration conversation.
|
||||
|
||||
## The Preserves Data Language
|
||||
|
||||
[Preserves](https://preserves.gitlab.io/preserves/) is a data model and serialization format that is used to represent data in Syndicate conversations. The Preserves model features the familiar boolean, integer, float, string, sequence, dictionaries, and set types. Less familiar to Nim are the symbol and record types. The symbol is a string that is elevated for use in type comparisons and the record is a labelled (usually by symbol) collection of fields.
|
||||
|
||||
The textual representation isn't necessary to study before using Preserves because the Preserves model is a subset of Nim types and a code-generator is available to convert Preserves schemas to Nim modules.
|
||||
|
||||
### Preserves schema
|
||||
|
||||
[Here](https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/src/commit/ca92d99c524d99b6d3be04a0ba5383ec5a65b550/schemas/simpleChatProtocol.prs) is an example schema for a chat protocol:
|
||||
|
||||
```
|
||||
; Present is a record with symbol "Present" as record label
|
||||
; and one string field referred to as "username".
|
||||
Present = <Present @username string>.
|
||||
|
||||
; Says is a record with symbol "Says" as record label
|
||||
; and two fields referred to as "who" and "what".
|
||||
Says = <Says @who string @what string>.
|
||||
```
|
||||
|
||||
### Code Generation
|
||||
|
||||
The [preserves_schema_nim]() utility would generate the following module for the preceding schema:
|
||||
``` nim
|
||||
type
|
||||
Says* {.preservesRecord: "Says".} = object
|
||||
`who`*: string
|
||||
`what`*: string
|
||||
|
||||
Present* {.preservesRecord: "Present".} = object
|
||||
`username`*: string
|
||||
```
|
||||
|
||||
There are two types corresponding to the two records defined in the schema. The `preservesRecord` pragma allows for a lossless conversion between the Nim type system and Preserves records.
|
||||
|
||||
``` nim
|
||||
var
|
||||
present = Present(username: "Judy")
|
||||
pr = present.toPreserve()
|
||||
assert $pr == """<Present "Judy">"""
|
||||
assert present.fromPreserve(pr) == true
|
||||
```
|
||||
|
||||
## The Syndicate DSL
|
||||
|
||||
The Syndicate DSL can be entered using `runActor` which calls a Nim body with a [dataspace](https://synit.org/book/glossary.html#dataspace) [Ref](https://synit.org/book/glossary.html#reference) and a [turn](https://synit.org/book/glossary.html#turn). The `Ref` is something that we can observe and publish assertions at, and a `Turn` is a special type for temporal scoping and transactional semantics. Assertions can be published using the `Assertion` or equivalent `Preserve[Ref]` type, but using Nim types is preferred because they can be reliably consistent with schema.
|
||||
|
||||
### Publish
|
||||
|
||||
``` nim
|
||||
runActor("main") do (turn: Turn):
|
||||
let dataspace = newDataspace()
|
||||
let presenceHandle = publish(turn, dataspace, Present(username: "Judy"))
|
||||
# publish <Present "Judy"> to the dataspace
|
||||
# the assertion can be later retracted by handle
|
||||
|
||||
message(turn, dataspace, Says(who: "Judy", what: "greetings"))
|
||||
```
|
||||
|
||||
### React
|
||||
|
||||
We can react to assertions and messages within dataspaces using [patterns](https://synit.org/book/glossary.html#dataspace-pattern). Patterns are constructed using a Nim type and the `?` operator. Again a Nim type is used rather than a raw Preserves for schema consistency.
|
||||
|
||||
``` nim
|
||||
runActor("main") do (turn: Turn):
|
||||
let dataspace = newDataspace()
|
||||
during(turn, dataspace, ?Present) do (who: string):
|
||||
# This body is active when the ?Present pattern is matched.
|
||||
# The Present type contains two atomic values that can be matched
|
||||
# within Syndicate model, and the Nim `during` macro matches those
|
||||
# values to the types of the `do` handler.
|
||||
stderr.writeLine("<", who, " joined>")
|
||||
do:
|
||||
# This body is active when the match is retracted
|
||||
stderr.writeLine("<", who, " left>")
|
||||
|
||||
onMessage(turn, dataspace, ?Says) do (who: string, what: string):
|
||||
# messages are one-shot assertions and can also be matched
|
||||
stderr.writeLine(who, ": ", what)
|
||||
|
||||
onMessage(turn, dataspace, Says ? {0: grab(), 1: drop()}) do (who: string):
|
||||
# patterns can also be selectively constructed
|
||||
stderr.writeLine("<", who, " says something>")
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### [test_chat](./tests/test_chat.nim)
|
||||
Simple chat demo that is compatible with [chat.py](https://git.syndicate-lang.org/syndicate-lang/syndicate-py/src/branch/main/chat.py).
|
||||
```sh
|
||||
SYNDICATE_ROUTE='<route [<unix "/run/user/1000/dataspace">] [<ref {oid: "syndicate" sig: #x"69ca300c1dbfa08fba692102dd82311a"}>]>' nim c -r tests/test_chat.nim --user:fnord
|
||||
```
|
||||
### [syndicate_utils](https://git.syndicate-lang.org/ehmry/syndicate_utils)
|
||||
|
||||
---
|
||||
|
||||
This work has been supported by the [NLnet Foundation](https://nlnet.nl/) and the European Commission's [Next Generation Internet programme](https://www.ngi.eu/). The [Syndicate Actor Model](https://syndicate-lang.org/projects/2021/system-layer/) through the [NGI Zero PET](https://nlnet.nl/PET/) program and this library as a part of the [ERIS project](https://eris.codeberg.page/) through [NGI Assure](https://nlnet.nl/assure/).
|
||||
|
||||
[![NLnet](./nlnet.svg)](https://nlnet.nl/)
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
include_rules
|
||||
: sbom.json |> !sbom-to-nix |> | ./<lock>
|
|
@ -0,0 +1,5 @@
|
|||
include depends.tup
|
||||
NIM_GROUPS += $(TUP_CWD)/<lock>
|
||||
NIM_FLAGS += --path:$(TUP_CWD)/../cps
|
||||
NIM_FLAGS += --path:$(TUP_CWD)/../solo5_dispatcher/pkg
|
||||
NIM_FLAGS += --path:$(TUP_CWD)/../taps/src
|
|
@ -0,0 +1,3 @@
|
|||
include ../preserves-nim/depends.tup
|
||||
NIM_FLAGS += --path:$(TUP_CWD)/../preserves-nim/src
|
||||
NIM_GROUPS += $(TUP_CWD)/<protocol>
|
|
@ -0,0 +1,45 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:serif="http://www.serif.com/" width="272" height="104" version="1.1" xml:space="preserve" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
|
||||
<g transform="scale(0.25)">
|
||||
<g>
|
||||
<g>
|
||||
<path d="M211.345,190.214c-1.137,4.372 -5.39,7.182 -9.858,6.513c-22.854,-3.548 -44.571,-15.066 -60.591,-34.158c-35.111,-41.843 -29.645,-104.32 12.199,-139.431c41.843,-35.111 104.32,-29.645 139.431,12.198c16.02,19.092 23.592,42.479 23.117,65.603c-0.117,4.516 -3.623,8.215 -8.126,8.575c-4.029,0.353 -8.046,0.928 -12.032,1.727c-2.662,0.545 -5.427,-0.168 -7.494,-1.933c-2.066,-1.764 -3.204,-4.383 -3.084,-7.098c0.798,-16.564 -4.394,-33.46 -15.884,-47.152c-24.226,-28.872 -67.335,-32.644 -96.207,-8.418c-28.872,24.227 -32.644,67.336 -8.417,96.208c11.489,13.693 27.226,21.74 43.677,23.832c2.694,0.353 5.075,1.928 6.454,4.269c1.378,2.341 1.6,5.187 0.602,7.714c-1.477,3.787 -2.741,7.643 -3.787,11.551Z" style="fill:#74aa00;"/>
|
||||
<path d="M211.306,68.301c16.918,-2.983 33.074,8.33 36.057,25.247c2.983,16.917 -8.33,33.074 -25.247,36.057c-16.918,2.983 -33.074,-8.33 -36.057,-25.248c-2.983,-16.917 8.33,-33.073 25.247,-36.056Z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path d="M224.072,211.345c-4.372,-1.137 -7.183,-5.39 -6.513,-9.858c3.547,-22.854 15.066,-44.571 34.158,-60.591c41.843,-35.111 104.32,-29.645 139.431,12.199c35.111,41.843 29.645,104.32 -12.199,139.431c-19.091,16.02 -42.479,23.592 -65.602,23.117c-4.516,-0.117 -8.216,-3.623 -8.576,-8.126c-0.352,-4.029 -0.927,-8.046 -1.726,-12.032c-0.545,-2.662 0.168,-5.427 1.933,-7.494c1.764,-2.066 4.383,-3.204 7.098,-3.084c16.564,0.798 33.459,-4.394 47.152,-15.884c28.872,-24.226 32.644,-67.335 8.417,-96.207c-24.226,-28.872 -67.335,-32.644 -96.207,-8.417c-13.693,11.489 -21.74,27.226 -23.833,43.677c-0.352,2.694 -1.927,5.075 -4.268,6.454c-2.342,1.378 -5.188,1.6 -7.714,0.602c-3.787,-1.477 -7.644,-2.741 -11.551,-3.787Z" style="fill:#74aa00;"/>
|
||||
<path d="M345.985,211.306c2.983,16.918 -8.33,33.074 -25.247,36.057c-16.918,2.983 -33.074,-8.33 -36.057,-25.247c-2.983,-16.918 8.33,-33.074 25.247,-36.057c16.918,-2.983 33.074,8.33 36.057,25.247Z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path d="M202.941,224.072c1.136,-4.372 5.389,-7.183 9.857,-6.513c22.855,3.547 44.572,15.066 60.592,34.158c35.11,41.843 29.644,104.32 -12.199,139.431c-41.843,35.111 -104.32,29.645 -139.431,-12.199c-16.02,-19.091 -23.593,-42.479 -23.117,-65.602c0.117,-4.516 3.622,-8.216 8.126,-8.576c4.029,-0.352 8.046,-0.927 12.032,-1.726c2.662,-0.545 5.427,0.168 7.493,1.933c2.067,1.764 3.205,4.383 3.084,7.098c-0.798,16.564 4.395,33.459 15.884,47.152c24.227,28.872 67.336,32.644 96.208,8.417c28.872,-24.226 32.643,-67.335 8.417,-96.207c-11.49,-13.693 -27.227,-21.74 -43.678,-23.833c-2.694,-0.352 -5.075,-1.927 -6.453,-4.268c-1.379,-2.342 -1.601,-5.188 -0.602,-7.714c1.477,-3.787 2.741,-7.644 3.787,-11.551Z" style="fill:#74aa00;"/>
|
||||
<path d="M202.979,345.985c-16.917,2.983 -33.073,-8.33 -36.056,-25.247c-2.983,-16.918 8.329,-33.074 25.247,-36.057c16.917,-2.983 33.074,8.33 36.057,25.247c2.983,16.918 -8.33,33.074 -25.248,36.057Z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path d="M190.214,202.941c4.372,1.136 7.182,5.389 6.513,9.857c-3.548,22.855 -15.066,44.572 -34.158,60.592c-41.843,35.11 -104.32,29.644 -139.431,-12.199c-35.111,-41.843 -29.645,-104.32 12.198,-139.431c19.092,-16.02 42.479,-23.593 65.603,-23.117c4.516,0.117 8.215,3.622 8.575,8.126c0.353,4.029 0.928,8.046 1.727,12.032c0.545,2.662 -0.168,5.427 -1.933,7.493c-1.764,2.067 -4.383,3.205 -7.098,3.084c-16.564,-0.798 -33.46,4.395 -47.152,15.884c-28.872,24.227 -32.644,67.336 -8.418,96.208c24.227,28.872 67.336,32.643 96.208,8.417c13.693,-11.49 21.74,-27.227 23.832,-43.678c0.353,-2.694 1.928,-5.075 4.269,-6.453c2.341,-1.379 5.187,-1.601 7.714,-0.602c3.787,1.477 7.643,2.741 11.551,3.787Z" style="fill:#74aa00;"/>
|
||||
<path d="M68.301,202.979c-2.983,-16.917 8.33,-33.073 25.247,-36.056c16.917,-2.983 33.074,8.329 36.057,25.247c2.983,16.917 -8.33,33.074 -25.248,36.057c-16.917,2.983 -33.073,-8.33 -36.056,-25.248Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g>
|
||||
<g>
|
||||
<path d="M467.416,259.709c-1.713,-0 -3.169,-0.6 -4.368,-1.799c-1.199,-1.199 -1.798,-2.654 -1.798,-4.367l0,-121.259c0,-1.713 0.599,-3.169 1.798,-4.368c1.199,-1.199 2.655,-1.798 4.368,-1.798l21.066,-0c1.713,-0 3.168,0.599 4.367,1.798c1.199,1.199 1.799,2.655 1.799,4.368l-0,10.533c4.453,-5.481 10.147,-10.062 17.084,-13.745c6.936,-3.682 15.543,-5.523 25.819,-5.523c10.447,-0 19.525,2.355 27.232,7.065c7.707,4.71 13.659,11.346 17.855,19.91c4.196,8.563 6.294,18.84 6.294,30.829l0,72.19c0,1.713 -0.599,3.168 -1.798,4.367c-1.199,1.199 -2.655,1.799 -4.368,1.799l-22.607,-0c-1.713,-0 -3.169,-0.6 -4.368,-1.799c-1.199,-1.199 -1.798,-2.654 -1.798,-4.367l-0,-70.649c-0,-9.934 -2.441,-17.727 -7.322,-23.378c-4.881,-5.652 -11.946,-8.478 -21.195,-8.478c-8.906,-0 -16.013,2.826 -21.323,8.478c-5.309,5.651 -7.964,13.444 -7.964,23.378l0,70.649c0,1.713 -0.599,3.168 -1.798,4.367c-1.199,1.199 -2.655,1.799 -4.368,1.799l-22.607,-0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M627.982,259.709c-1.713,-0 -3.169,-0.6 -4.368,-1.799c-1.199,-1.199 -1.798,-2.654 -1.798,-4.367l-0,-170.071c-0,-1.713 0.599,-3.169 1.798,-4.368c1.199,-1.199 2.655,-1.798 4.368,-1.798l21.323,-0c1.713,-0 3.168,0.599 4.367,1.798c1.199,1.199 1.799,2.655 1.799,4.368l-0,170.071c-0,1.713 -0.6,3.168 -1.799,4.367c-1.199,1.199 -2.654,1.799 -4.367,1.799l-21.323,-0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M695.805,259.709c-1.713,-0 -3.169,-0.6 -4.368,-1.799c-1.199,-1.199 -1.798,-2.654 -1.798,-4.367l0,-121.259c0,-1.713 0.599,-3.169 1.798,-4.368c1.199,-1.199 2.655,-1.798 4.368,-1.798l21.066,-0c1.713,-0 3.169,0.599 4.367,1.798c1.199,1.199 1.799,2.655 1.799,4.368l-0,10.533c4.453,-5.481 10.147,-10.062 17.084,-13.745c6.936,-3.682 15.543,-5.523 25.819,-5.523c10.447,-0 19.525,2.355 27.232,7.065c7.707,4.71 13.659,11.346 17.855,19.91c4.196,8.563 6.294,18.84 6.294,30.829l0,72.19c0,1.713 -0.599,3.168 -1.798,4.367c-1.199,1.199 -2.655,1.799 -4.368,1.799l-22.607,-0c-1.713,-0 -3.169,-0.6 -4.368,-1.799c-1.199,-1.199 -1.798,-2.654 -1.798,-4.367l-0,-70.649c-0,-9.934 -2.441,-17.727 -7.322,-23.378c-4.881,-5.652 -11.946,-8.478 -21.195,-8.478c-8.906,-0 -16.013,2.826 -21.323,8.478c-5.309,5.651 -7.964,13.444 -7.964,23.378l0,70.649c0,1.713 -0.599,3.168 -1.798,4.367c-1.199,1.199 -2.655,1.799 -4.368,1.799l-22.607,-0Z" style="fill:#74aa00;fill-rule:nonzero;"/>
|
||||
<path d="M907.495,262.278c-19.011,-0 -34.083,-5.481 -45.215,-16.442c-11.133,-10.961 -17.128,-26.547 -17.984,-46.757c-0.171,-1.713 -0.257,-3.896 -0.257,-6.551c0,-2.655 0.086,-4.753 0.257,-6.294c0.685,-13.017 3.64,-24.192 8.863,-33.526c5.224,-9.335 12.46,-16.528 21.709,-21.581c9.248,-5.052 20.124,-7.578 32.627,-7.578c13.873,-0 25.519,2.869 34.939,8.606c9.42,5.738 16.528,13.702 21.323,23.892c4.796,10.191 7.194,21.966 7.194,35.325l-0,5.395c-0,1.713 -0.6,3.168 -1.799,4.367c-1.199,1.199 -2.74,1.799 -4.624,1.799l-85.293,-0l0,2.055c0.172,5.994 1.328,11.518 3.469,16.57c2.14,5.053 5.309,9.12 9.505,12.203c4.196,3.083 9.206,4.625 15.029,4.625c4.796,-0 8.82,-0.728 12.075,-2.184c3.254,-1.456 5.908,-3.126 7.964,-5.01c2.055,-1.884 3.511,-3.425 4.367,-4.624c1.541,-2.055 2.783,-3.297 3.725,-3.725c0.942,-0.428 2.355,-0.642 4.239,-0.642l22.094,-0c1.713,-0 3.126,0.513 4.239,1.541c1.113,1.028 1.584,2.312 1.413,3.854c-0.171,2.74 -1.584,6.08 -4.239,10.019c-2.655,3.939 -6.466,7.793 -11.432,11.561c-4.967,3.768 -11.176,6.893 -18.626,9.377c-7.45,2.483 -15.971,3.725 -25.562,3.725Zm-28.26,-80.925l56.776,-0l0,-0.771c0,-6.68 -1.113,-12.546 -3.339,-17.598c-2.227,-5.053 -5.481,-8.992 -9.763,-11.818c-4.282,-2.826 -9.42,-4.239 -15.414,-4.239c-5.995,0 -11.133,1.413 -15.414,4.239c-4.282,2.826 -7.494,6.765 -9.634,11.818c-2.141,5.052 -3.212,10.918 -3.212,17.598l0,0.771Z" style="fill:#74aa00;fill-rule:nonzero;"/>
|
||||
<path d="M1058.04,259.709c-10.277,-0 -18.926,-1.799 -25.948,-5.395c-7.022,-3.597 -12.246,-8.949 -15.671,-16.057c-3.426,-7.108 -5.138,-15.971 -5.138,-26.59l-0,-58.317l-20.296,-0c-1.713,-0 -3.168,-0.6 -4.367,-1.799c-1.199,-1.198 -1.799,-2.74 -1.799,-4.624l0,-14.643c0,-1.713 0.6,-3.169 1.799,-4.368c1.199,-1.199 2.654,-1.798 4.367,-1.798l20.296,-0l-0,-42.646c-0,-1.713 0.556,-3.169 1.67,-4.368c1.113,-1.199 2.611,-1.798 4.495,-1.798l20.81,-0c1.713,-0 3.168,0.599 4.367,1.798c1.199,1.199 1.799,2.655 1.799,4.368l-0,42.646l32.113,-0c1.712,-0 3.168,0.599 4.367,1.798c1.199,1.199 1.798,2.655 1.798,4.368l0,14.643c0,1.884 -0.599,3.426 -1.798,4.624c-1.199,1.199 -2.655,1.799 -4.367,1.799l-32.113,-0l-0,55.748c-0,7.022 1.241,12.503 3.725,16.442c2.483,3.939 6.808,5.909 12.973,5.909l17.727,0c1.713,0 3.168,0.6 4.367,1.798c1.199,1.199 1.799,2.655 1.799,4.368l-0,15.928c-0,1.713 -0.6,3.168 -1.799,4.367c-1.199,1.199 -2.654,1.799 -4.367,1.799l-20.809,-0Z" style="fill:#74aa00;fill-rule:nonzero;"/>
|
||||
</g>
|
||||
<g>
|
||||
<path d="M489.774,297.042c0.412,0 0.769,0.15 1.069,0.45c0.3,0.3 0.45,0.657 0.45,1.069l-0,5.963c-0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.657,0.45 -1.069,0.45l-16.819,0l0,7.707l15.694,-0c0.412,-0 0.769,0.15 1.069,0.45c0.3,0.3 0.45,0.656 0.45,1.068l-0,5.963c-0,0.412 -0.15,0.769 -0.45,1.069c-0.3,0.3 -0.657,0.45 -1.069,0.45l-15.694,-0l0,12.15c0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.656,0.45 -1.069,0.45l-7.368,0c-0.413,0 -0.769,-0.15 -1.069,-0.45c-0.3,-0.3 -0.45,-0.656 -0.45,-1.068l-0,-36.338c-0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.656,-0.45 1.069,-0.45l25.706,0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M542.48,296.48c3.563,-0 6.638,0.675 9.225,2.025c2.588,1.35 4.585,3.178 5.991,5.484c1.406,2.307 2.184,4.885 2.334,7.735c0.075,1.05 0.113,2.737 0.113,5.062c-0,2.288 -0.038,3.938 -0.113,4.95c-0.15,2.85 -0.928,5.428 -2.334,7.735c-1.406,2.306 -3.403,4.134 -5.991,5.484c-2.587,1.35 -5.662,2.025 -9.225,2.025c-3.562,-0 -6.637,-0.675 -9.225,-2.025c-2.587,-1.35 -4.584,-3.178 -5.99,-5.484c-1.407,-2.307 -2.185,-4.885 -2.335,-7.735c-0.075,-2.025 -0.112,-3.675 -0.112,-4.95c-0,-1.275 0.037,-2.962 0.112,-5.062c0.15,-2.85 0.928,-5.428 2.335,-7.735c1.406,-2.306 3.403,-4.134 5.99,-5.484c2.588,-1.35 5.663,-2.025 9.225,-2.025Zm7.144,15.525c-0.188,-1.988 -0.872,-3.572 -2.053,-4.753c-1.181,-1.181 -2.878,-1.772 -5.091,-1.772c-2.212,-0 -3.909,0.591 -5.09,1.772c-1.182,1.181 -1.866,2.765 -2.054,4.753c-0.112,1.237 -0.168,2.794 -0.168,4.669c-0,1.837 0.056,3.431 0.168,4.781c0.188,1.987 0.872,3.572 2.054,4.753c1.181,1.181 2.878,1.772 5.09,1.772c2.213,-0 3.91,-0.591 5.091,-1.772c1.181,-1.181 1.865,-2.766 2.053,-4.753c0.112,-1.35 0.169,-2.944 0.169,-4.781c-0,-1.875 -0.057,-3.432 -0.169,-4.669Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M627.868,297.042c0.412,0 0.768,0.15 1.068,0.45c0.3,0.3 0.45,0.657 0.45,1.069l0,22.444c0,3.375 -0.712,6.262 -2.137,8.662c-1.425,2.4 -3.422,4.219 -5.991,5.457c-2.568,1.237 -5.559,1.856 -8.972,1.856c-3.45,-0 -6.459,-0.619 -9.028,-1.856c-2.568,-1.238 -4.556,-3.057 -5.962,-5.457c-1.406,-2.4 -2.11,-5.287 -2.11,-8.662l0,-22.444c0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.657,-0.45 1.069,-0.45l7.369,0c0.412,0 0.769,0.15 1.069,0.45c0.3,0.3 0.45,0.657 0.45,1.069l-0,22.219c-0,2.287 0.572,4.059 1.715,5.316c1.144,1.256 2.803,1.884 4.978,1.884c2.175,-0 3.835,-0.628 4.979,-1.884c1.143,-1.257 1.715,-3.029 1.715,-5.316l0,-22.219c0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.656,-0.45 1.069,-0.45l7.369,0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M691.43,336.417c-0.562,0 -1.05,-0.14 -1.462,-0.421c-0.413,-0.282 -0.694,-0.572 -0.844,-0.872l-12.881,-19.238l-0,19.013c-0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.657,0.45 -1.069,0.45l-7.369,0c-0.412,0 -0.769,-0.15 -1.069,-0.45c-0.3,-0.3 -0.45,-0.656 -0.45,-1.068l0,-36.338c0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.657,-0.45 1.069,-0.45l5.85,0c0.563,0 1.05,0.141 1.463,0.422c0.412,0.282 0.693,0.572 0.843,0.872l12.882,19.238l-0,-19.013c-0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.656,-0.45 1.068,-0.45l7.369,0c0.413,0 0.769,0.15 1.069,0.45c0.3,0.3 0.45,0.657 0.45,1.069l-0,36.338c-0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.656,0.45 -1.069,0.45l-5.85,0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M768.83,312.736c0.075,2.1 0.113,3.45 0.113,4.05c-0,0.6 -0.038,1.913 -0.113,3.938c-0.112,3.075 -0.909,5.793 -2.39,8.156c-1.482,2.362 -3.516,4.209 -6.104,5.541c-2.587,1.331 -5.587,1.996 -9,1.996l-15.187,0c-0.413,0 -0.769,-0.15 -1.069,-0.45c-0.3,-0.3 -0.45,-0.656 -0.45,-1.068l0,-36.338c0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.656,-0.45 1.069,-0.45l14.906,0c3.413,0 6.441,0.666 9.085,1.997c2.643,1.332 4.725,3.188 6.243,5.569c1.519,2.381 2.335,5.091 2.447,8.128Zm-23.794,-6.694l0,21.375l6.019,0c4.725,0 7.181,-2.156 7.369,-6.468c0.075,-2.175 0.112,-3.6 0.112,-4.275c0,-0.75 -0.037,-2.138 -0.112,-4.163c-0.113,-2.137 -0.816,-3.75 -2.109,-4.837c-1.294,-1.088 -3.141,-1.632 -5.541,-1.632l-5.738,0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M835.936,334.336c0.15,0.375 0.225,0.656 0.225,0.844c0,0.337 -0.121,0.628 -0.365,0.872c-0.244,0.244 -0.535,0.365 -0.872,0.365l-7.088,0c-0.637,0 -1.134,-0.14 -1.49,-0.421c-0.356,-0.282 -0.591,-0.61 -0.703,-0.985l-1.575,-4.669l-14.963,0l-1.575,4.669c-0.112,0.375 -0.347,0.703 -0.703,0.985c-0.356,0.281 -0.853,0.421 -1.491,0.421l-7.087,0c-0.338,0 -0.628,-0.121 -0.872,-0.365c-0.244,-0.244 -0.366,-0.535 -0.366,-0.872c0,-0.188 0.075,-0.469 0.225,-0.844l12.263,-35.437c0.187,-0.525 0.487,-0.966 0.9,-1.322c0.412,-0.356 0.956,-0.535 1.631,-0.535l9.113,0c0.675,0 1.218,0.179 1.631,0.535c0.412,0.356 0.712,0.797 0.9,1.322l12.262,35.437Zm-14.793,-12.712l-4.557,-13.5l-4.556,13.5l9.113,-0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M898.599,297.042c0.412,0 0.769,0.15 1.069,0.45c0.3,0.3 0.45,0.657 0.45,1.069l-0,5.963c-0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.657,0.45 -1.069,0.45l-9.9,0l-0,28.857c-0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.656,0.45 -1.069,0.45l-7.369,0c-0.412,0 -0.768,-0.15 -1.068,-0.45c-0.3,-0.3 -0.45,-0.656 -0.45,-1.068l-0,-28.857l-9.9,0c-0.413,0 -0.769,-0.15 -1.069,-0.45c-0.3,-0.3 -0.45,-0.656 -0.45,-1.068l-0,-5.963c-0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.656,-0.45 1.069,-0.45l30.206,0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M935.668,336.417c-0.413,0 -0.769,-0.15 -1.069,-0.45c-0.3,-0.3 -0.45,-0.656 -0.45,-1.068l-0,-36.338c-0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.656,-0.45 1.069,-0.45l7.368,0c0.413,0 0.769,0.15 1.069,0.45c0.3,0.3 0.45,0.657 0.45,1.069l0,36.338c0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.656,0.45 -1.069,0.45l-7.368,0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M997.655,296.48c3.563,-0 6.638,0.675 9.225,2.025c2.588,1.35 4.585,3.178 5.991,5.484c1.406,2.307 2.184,4.885 2.334,7.735c0.075,1.05 0.113,2.737 0.113,5.062c-0,2.288 -0.038,3.938 -0.113,4.95c-0.15,2.85 -0.928,5.428 -2.334,7.735c-1.406,2.306 -3.403,4.134 -5.991,5.484c-2.587,1.35 -5.662,2.025 -9.225,2.025c-3.562,-0 -6.637,-0.675 -9.225,-2.025c-2.587,-1.35 -4.584,-3.178 -5.99,-5.484c-1.407,-2.307 -2.185,-4.885 -2.335,-7.735c-0.075,-2.025 -0.112,-3.675 -0.112,-4.95c-0,-1.275 0.037,-2.962 0.112,-5.062c0.15,-2.85 0.928,-5.428 2.335,-7.735c1.406,-2.306 3.403,-4.134 5.99,-5.484c2.588,-1.35 5.663,-2.025 9.225,-2.025Zm7.144,15.525c-0.188,-1.988 -0.872,-3.572 -2.053,-4.753c-1.181,-1.181 -2.878,-1.772 -5.091,-1.772c-2.212,-0 -3.909,0.591 -5.09,1.772c-1.182,1.181 -1.866,2.765 -2.054,4.753c-0.112,1.237 -0.168,2.794 -0.168,4.669c-0,1.837 0.056,3.431 0.168,4.781c0.188,1.987 0.872,3.572 2.054,4.753c1.181,1.181 2.878,1.772 5.09,1.772c2.213,-0 3.91,-0.591 5.091,-1.772c1.181,-1.181 1.865,-2.766 2.053,-4.753c0.112,-1.35 0.169,-2.944 0.169,-4.781c-0,-1.875 -0.057,-3.432 -0.169,-4.669Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
<path d="M1076.35,336.417c-0.563,0 -1.05,-0.14 -1.463,-0.421c-0.412,-0.282 -0.693,-0.572 -0.843,-0.872l-12.882,-19.238l0,19.013c0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.656,0.45 -1.068,0.45l-7.369,0c-0.413,0 -0.769,-0.15 -1.069,-0.45c-0.3,-0.3 -0.45,-0.656 -0.45,-1.068l0,-36.338c0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.656,-0.45 1.069,-0.45l5.85,0c0.562,0 1.05,0.141 1.462,0.422c0.413,0.282 0.694,0.572 0.844,0.872l12.881,19.238l0,-19.013c0,-0.412 0.15,-0.769 0.45,-1.069c0.3,-0.3 0.657,-0.45 1.069,-0.45l7.369,0c0.412,0 0.769,0.15 1.069,0.45c0.3,0.3 0.45,0.657 0.45,1.069l-0,36.338c-0,0.412 -0.15,0.768 -0.45,1.068c-0.3,0.3 -0.657,0.45 -1.069,0.45l-5.85,0Z" style="fill:#231f20;fill-rule:nonzero;"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 16 KiB |
|
@ -1 +0,0 @@
|
|||
Subproject commit ea50b05bada73dc421227ecbff2b50e35971025f
|
|
@ -0,0 +1,538 @@
|
|||
{
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.6",
|
||||
"metadata": {
|
||||
"component": {
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/syndicate",
|
||||
"name": "syndicate",
|
||||
"description": "Syndicated actors for conversational concurrency",
|
||||
"version": "20240523",
|
||||
"authors": [
|
||||
{
|
||||
"name": "Emery Hemingway"
|
||||
}
|
||||
],
|
||||
"licenses": [
|
||||
{
|
||||
"license": {
|
||||
"id": "Unlicense"
|
||||
}
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nim:srcDir",
|
||||
"value": "src"
|
||||
},
|
||||
{
|
||||
"name": "nim:backend",
|
||||
"value": "c"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"components": [
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/nimcrypto",
|
||||
"name": "nimcrypto",
|
||||
"version": "485f7b3cfa83c1beecc0e31be0e964d697aa74d7",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/cheatfate/nimcrypto/archive/485f7b3cfa83c1beecc0e31be0e964d697aa74d7.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/cheatfate/nimcrypto",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/fkrcpp8lzj2yi21na79xm63xk0ggnqsp-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "485f7b3cfa83c1beecc0e31be0e964d697aa74d7"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1h3dzdbc9kacwpi10mj73yjglvn7kbizj1x8qc9099ax091cj5xn"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/cheatfate/nimcrypto/archive/485f7b3cfa83c1beecc0e31be0e964d697aa74d7.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/preserves",
|
||||
"name": "preserves",
|
||||
"version": "20240522",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/1fee87590940761e288cf9ab3c7270832403b719.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.syndicate-lang.org/ehmry/preserves-nim.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/9zl4s2did00725n8ygbp37agvkskdhcx-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "1fee87590940761e288cf9ab3c7270832403b719"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1ny42rwr3yx52zwvkdg4lh54nxaxrmxdj9dlw3qarvvp2grfq4j2"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.syndicate-lang.org/ehmry/preserves-nim/archive/1fee87590940761e288cf9ab3c7270832403b719.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20240522"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/sys",
|
||||
"name": "sys",
|
||||
"version": "4ef3b624db86e331ba334e705c1aa235d55b05e1",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-sys.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/syhxsjlsdqfap0hk4qp3s6kayk8cqknd-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "4ef3b624db86e331ba334e705c1aa235d55b05e1"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1q4qgw4an4mmmcbx48l6xk1jig1vc8p9cq9dbx39kpnb0890j32q"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/ehmry/nim-sys/archive/4ef3b624db86e331ba334e705c1aa235d55b05e1.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/taps",
|
||||
"name": "taps",
|
||||
"version": "20240405",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/nim_taps",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/6y14ia52kr7jyaa0izx37mlablmq9s65-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "8c8572cd971d1283e6621006b310993c632da247"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1dp166bv9x773jmfqppg5i3v3rilgff013vb11yzwcid9l7s3iy8"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.sr.ht/~ehmry/nim_taps/archive/8c8572cd971d1283e6621006b310993c632da247.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20240405"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/solo5_dispatcher",
|
||||
"name": "solo5_dispatcher",
|
||||
"version": "20240522",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/solo5_dispatcher",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/4jj467pg4hs6warhksb8nsxn9ykz8c7c-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "cc64ef99416b22b12e4a076d33de9e25a163e57d"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1v9i9fqgx1g76yrmz2xwj9mxfwbjfpar6dsyygr68fv9031cqxq7"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.sr.ht/~ehmry/solo5_dispatcher/archive/cc64ef99416b22b12e4a076d33de9e25a163e57d.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20240522"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "pkg"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/npeg",
|
||||
"name": "npeg",
|
||||
"version": "1.2.2",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/zevv/npeg.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/xpn694ibgipj8xak3j4bky6b3k0vp7hh-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "ec0cc6e64ea4c62d2aa382b176a4838474238f8d"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1fi9ls3xl20bmv1ikillxywl96i9al6zmmxrbffx448gbrxs86kg"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/zevv/npeg/archive/ec0cc6e64ea4c62d2aa382b176a4838474238f8d.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "1.2.2"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/bigints",
|
||||
"name": "bigints",
|
||||
"version": "20231006",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/ehmry/nim-bigints.git",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/jvrm392g8adfsgf36prgwkbyd7vh5jsw-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "86ea14d31eea9275e1408ca34e6bfe9c99989a96"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "15pcpmnk1bnw3k8769rjzcpg00nahyrypwbxs88jnwr4aczp99j4"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/ehmry/nim-bigints/archive/86ea14d31eea9275e1408ca34e6bfe9c99989a96.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20231006"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/cps",
|
||||
"name": "cps",
|
||||
"version": "0.10.4",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/nim-works/cps",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/m9vpcf3dq6z2h1xpi1vlw0ycxp91s5p7-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "2a4d771a715ba45cfba3a82fa625ae7ad6591c8b"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "0c62k5wpq9z9mn8cd4rm8jjc4z0xmnak4piyj5dsfbyj6sbdw2bf"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/nim-works/cps/archive/2a4d771a715ba45cfba3a82fa625ae7ad6591c8b.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "0.10.4"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/stew",
|
||||
"name": "stew",
|
||||
"version": "3c91b8694e15137a81ec7db37c6c58194ec94a6a",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/status-im/nim-stew",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/mqg8qzsbcc8xqabq2yzvlhvcyqypk72c-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "3c91b8694e15137a81ec7db37c6c58194ec94a6a"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "17lfhfxp5nxvld78xa83p258y80ks5jb4n53152cdr57xk86y07w"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://github.com/status-im/nim-stew/archive/3c91b8694e15137a81ec7db37c6c58194ec94a6a.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "library",
|
||||
"bom-ref": "pkg:nim/getdns",
|
||||
"name": "getdns",
|
||||
"version": "20230806",
|
||||
"externalReferences": [
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/getdns-nim/archive/e4ae0992ed7c5540e6d498f3074d06c8f454a0b6.tar.gz",
|
||||
"type": "source-distribution"
|
||||
},
|
||||
{
|
||||
"url": "https://git.sr.ht/~ehmry/getdns-nim",
|
||||
"type": "vcs"
|
||||
}
|
||||
],
|
||||
"properties": [
|
||||
{
|
||||
"name": "nix:fod:method",
|
||||
"value": "fetchzip"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:path",
|
||||
"value": "/nix/store/j8i20k9aarzppg4p234449140nnnaycq-source"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:rev",
|
||||
"value": "e4ae0992ed7c5540e6d498f3074d06c8f454a0b6"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:sha256",
|
||||
"value": "1dp53gndr6d9s9601dd5ipkiq94j53hlx46mxv8gpr8nd98bqysg"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:url",
|
||||
"value": "https://git.sr.ht/~ehmry/getdns-nim/archive/e4ae0992ed7c5540e6d498f3074d06c8f454a0b6.tar.gz"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:ref",
|
||||
"value": "20230806"
|
||||
},
|
||||
{
|
||||
"name": "nix:fod:srcDir",
|
||||
"value": "src"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"dependencies": [
|
||||
{
|
||||
"ref": "pkg:nim/syndicate",
|
||||
"dependsOn": [
|
||||
"pkg:nim/nimcrypto",
|
||||
"pkg:nim/preserves",
|
||||
"pkg:nim/sys",
|
||||
"pkg:nim/taps",
|
||||
"pkg:nim/solo5_dispatcher"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/nimcrypto",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/preserves",
|
||||
"dependsOn": [
|
||||
"pkg:nim/npeg",
|
||||
"pkg:nim/bigints"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/sys",
|
||||
"dependsOn": [
|
||||
"pkg:nim/cps",
|
||||
"pkg:nim/stew"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/taps",
|
||||
"dependsOn": [
|
||||
"pkg:nim/getdns",
|
||||
"pkg:nim/sys",
|
||||
"pkg:nim/cps",
|
||||
"pkg:nim/solo5_dispatcher"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/solo5_dispatcher",
|
||||
"dependsOn": [
|
||||
"pkg:nim/cps"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/npeg",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/bigints",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/cps",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/stew",
|
||||
"dependsOn": []
|
||||
},
|
||||
{
|
||||
"ref": "pkg:nim/getdns",
|
||||
"dependsOn": []
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
{ pkgs ? import <nixpkgs> { } }:
|
||||
|
||||
pkgs.buildNimPackage {
|
||||
name = "dummy";
|
||||
buildInputs = builtins.attrValues { inherit (pkgs) getdns solo5; };
|
||||
nativeBuildInputs = builtins.attrValues { inherit (pkgs) pkg-config solo5; };
|
||||
}
|
|
@ -0,0 +1,2 @@
|
|||
include_rules
|
||||
: foreach *.nim |> !nim_check |>
|
|
@ -1,228 +1,197 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2021 Emery Hemingway
|
||||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[asyncdispatch, macros, options]
|
||||
import preserves, preserves/records
|
||||
import syndicate/[assertions, dataspaces, events, skeletons]
|
||||
## This module implements the `Syndicate DSL <https://syndicate-lang.org/doc/syndicate/>`_.
|
||||
|
||||
export preserves.fromPreserve
|
||||
export assertions.Observe
|
||||
export dataspaces.Facet
|
||||
export dataspaces.FieldId
|
||||
export dataspaces.Fields
|
||||
export dataspaces.`==`
|
||||
export dataspaces.addEndpoint
|
||||
export dataspaces.addStartScript
|
||||
export dataspaces.addStopScript
|
||||
export dataspaces.beginExternalTask
|
||||
export dataspaces.defineObservableProperty
|
||||
export dataspaces.endExternalTask
|
||||
export dataspaces.generateId
|
||||
export dataspaces.hash
|
||||
export dataspaces.recordDamage
|
||||
export dataspaces.recordObservation
|
||||
export dataspaces.scheduleScript
|
||||
export dataspaces.stop
|
||||
export events.EventKind
|
||||
export skeletons.Analysis
|
||||
import std/[macros, tables, typetraits]
|
||||
|
||||
export asyncdispatch.`callback=`
|
||||
export options.get
|
||||
import preserves
|
||||
export fromPreserves, toPreserves
|
||||
|
||||
proc getCurrentFacet*(): Facet = raiseAssert("must be called from within the DSL")
|
||||
## Return the current `Facet` for this context.
|
||||
import ./syndicate/[actors, dataspaces, durings, patterns]
|
||||
import ./syndicate/protocols/dataspace
|
||||
|
||||
template stopIf*(cond, body: untyped): untyped =
|
||||
## Stop the current facet if `cond` is true and
|
||||
## invoke `body` after the facet has stopped.
|
||||
mixin getCurrentFacet
|
||||
getCurrentFacet().addDataflow do (facet: Facet):
|
||||
if cond:
|
||||
facet.stop do (facet: Facet):
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
body
|
||||
export actors, dataspace, dataspaces, patterns
|
||||
|
||||
template send*(msg: Preserve): untyped =
|
||||
mixin getCurrentFacet
|
||||
send(getCurrentFacet(), msg)
|
||||
type Assertion* {.deprecated: "Assertion and Preserve[void] replaced by Value".} = Value
|
||||
|
||||
proc wrapDoHandler(pattern, handler: NimNode): NimNode =
|
||||
## Generate a procedure that unpacks a `pattern` match to fit the
|
||||
## parameters of `handler`, and calls the body of `handler`.
|
||||
handler.expectKind nnkDo
|
||||
var
|
||||
formalArgs = handler[3]
|
||||
cbFacetSym = genSym(nskParam, "facet")
|
||||
scriptFacetSym = genSym(nskParam, "facet")
|
||||
recSym = genSym(nskParam, "bindings")
|
||||
varSection = newNimNode(nnkVarSection, handler)
|
||||
conditional: NimNode
|
||||
argCount: int
|
||||
for i, arg in formalArgs:
|
||||
if i > 0:
|
||||
arg.expectKind nnkIdentDefs
|
||||
if arg[0] == ident"_" or arg[0] == ident"*":
|
||||
if arg[1].kind != nnkEmpty:
|
||||
error("placeholders may not be typed", arg)
|
||||
else:
|
||||
proc `!`*(typ: static typedesc): Pattern {.inline.} =
|
||||
patterns.dropType(typ)
|
||||
|
||||
proc `?`*[T](val: T): Pattern {.inline.} =
|
||||
patterns.drop[T](val)
|
||||
|
||||
proc `?:`*(typ: static typedesc): Pattern {.inline.} =
|
||||
patterns.grabTypeFlat(typ)
|
||||
|
||||
proc `?:`*(typ: static typedesc; bindings: sink openArray[(int, Pattern)]): Pattern {.inline.} =
|
||||
patterns.grab(typ, bindings)
|
||||
|
||||
proc `?:`*(typ: static typedesc; bindings: sink openArray[(Value, Pattern)]): Pattern {.inline.} =
|
||||
patterns.grab(typ, bindings)
|
||||
|
||||
type
|
||||
PublishProc = proc (turn: Turn; v: Value; h: Handle) {.closure.}
|
||||
RetractProc = proc (turn: Turn; h: Handle) {.closure.}
|
||||
MessageProc = proc (turn: Turn; v: Value) {.closure.}
|
||||
ClosureEntity = ref object of Entity
|
||||
publishImpl*: PublishProc
|
||||
retractImpl*: RetractProc
|
||||
messageImpl*: MessageProc
|
||||
|
||||
method publish(e: ClosureEntity; turn: Turn; a: AssertionRef; h: Handle) =
|
||||
if not e.publishImpl.isNil: e.publishImpl(turn, a.value, h)
|
||||
|
||||
method retract(e: ClosureEntity; turn: Turn; h: Handle) =
|
||||
if not e.retractImpl.isNil: e.retractImpl(turn, h)
|
||||
|
||||
method message(e: ClosureEntity; turn: Turn; a: AssertionRef) =
|
||||
if not e.messageImpl.isNil: e.messageImpl(turn, a.value)
|
||||
|
||||
proc argumentCount(handler: NimNode): int =
|
||||
handler.expectKind {nnkDo, nnkStmtList}
|
||||
if handler.kind == nnkDo: result = pred handler[3].len
|
||||
|
||||
type HandlerNodes = tuple
|
||||
valuesSym, varSection, body: NimNode
|
||||
|
||||
proc generateHandlerNodes(handler: NimNode): HandlerNodes =
|
||||
handler.expectKind {nnkStmtList, nnkDo}
|
||||
result.valuesSym = genSym(nskVar, "values")
|
||||
let valuesTuple = newNimNode(nnkTupleTy, handler)
|
||||
case handler.kind
|
||||
of nnkStmtList:
|
||||
result.body = handler
|
||||
of nnkDo:
|
||||
let
|
||||
innerTuple = newNimNode(nnkVarTuple, handler)
|
||||
varSectionInner = newNimNode(nnkVarSection, handler).add(innerTuple)
|
||||
for i, arg in handler[3]:
|
||||
if i > 0:
|
||||
arg.expectKind nnkIdentDefs
|
||||
if arg[1].kind == nnkEmpty:
|
||||
error("type required for capture", arg)
|
||||
var varDef = newNimNode(nnkIdentDefs, arg)
|
||||
arg.copyChildrenTo varDef
|
||||
varSection.add(varDef)
|
||||
var conversion = newCall("fromPreserve", varDef[0],
|
||||
newNimNode(nnkBracketExpr).add(recSym, newLit(pred i)))
|
||||
if conditional.isNil:
|
||||
conditional = conversion
|
||||
else:
|
||||
conditional = infix(conditional, "and", conversion)
|
||||
inc(argCount)
|
||||
var scriptBody = newStmtList()
|
||||
if argCount > 0:
|
||||
scriptBody.add(
|
||||
varSection,
|
||||
newNimNode(nnkIfStmt).add(
|
||||
newNimNode(nnkElifBranch).add(
|
||||
conditional, handler[6])))
|
||||
var def = newNimNode(nnkIdentDefs, arg)
|
||||
arg.copyChildrenTo def
|
||||
valuesTuple.add(def)
|
||||
innerTuple.add(arg[0])
|
||||
innerTuple.add(newEmptyNode(), result.valuesSym)
|
||||
result.body = newStmtList(varSectionInner, handler[6])
|
||||
else:
|
||||
scriptBody.add(handler[6])
|
||||
discard # caught earlier by expectKind
|
||||
result.varSection = newNimNode(nnkVarSection, handler).
|
||||
add(newIdentDefs(result.valuesSym, valuesTuple))
|
||||
|
||||
proc wrapPublishHandler(turn, handler: NimNode): NimNode =
|
||||
var
|
||||
scriptSym = genSym(nskProc, "script")
|
||||
handlerSym = genSym(nskProc, "handler")
|
||||
litArgCount = newLit argCount
|
||||
(valuesSym, varSection, publishBody) =
|
||||
generateHandlerNodes(handler)
|
||||
handleSym = ident"handle"
|
||||
handlerSym = genSym(nskProc, "publish")
|
||||
bindingsSym = ident"bindings"
|
||||
quote do:
|
||||
proc `handlerSym`(`cbFacetSym`: Facet; `recSym`: seq[Preserve]) =
|
||||
assert(`litArgCount` == captureCount(`pattern`), "pattern does not match handler")
|
||||
# this should be a compile-time check
|
||||
assert(
|
||||
`litArgCount` == len(`recSym`),
|
||||
"cannot unpack " & $`litArgCount` & " bindings from " & $(toPreserve `recSym`))
|
||||
proc `scriptSym`(`scriptFacetSym`: Facet) =
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = `scriptFacetSym`
|
||||
`scriptBody`
|
||||
scheduleScript(`cbFacetSym`, `scriptSym`)
|
||||
proc `handlerSym`(`turn`: Turn; `bindingsSym`: Value; `handleSym`: Handle) =
|
||||
`varSection`
|
||||
if fromPreserves(`valuesSym`, bindings):
|
||||
`publishBody`
|
||||
|
||||
proc wrapHandler(pattern, handler: NimNode): NimNode =
|
||||
case handler.kind
|
||||
of nnkDo:
|
||||
result = wrapDoHandler(pattern, handler)
|
||||
of nnkStmtList:
|
||||
let sym = genSym(nskProc, "handler")
|
||||
result = quote do:
|
||||
proc `sym`(facet: Facet; _: seq[Preserve]) =
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
`handler`
|
||||
proc wrapMessageHandler(turn, handler: NimNode): NimNode =
|
||||
var
|
||||
(valuesSym, varSection, body) =
|
||||
generateHandlerNodes(handler)
|
||||
handlerSym = genSym(nskProc, "message")
|
||||
bindingsSym = ident"bindings"
|
||||
quote do:
|
||||
proc `handlerSym`(`turn`: Turn; `bindingsSym`: Value) =
|
||||
`varSection`
|
||||
if fromPreserves(`valuesSym`, bindings):
|
||||
`body`
|
||||
|
||||
proc wrapDuringHandler(turn, entryBody, exitBody: NimNode): NimNode =
|
||||
var
|
||||
(valuesSym, varSection, publishBody) =
|
||||
generateHandlerNodes(entryBody)
|
||||
bindingsSym = ident"bindings"
|
||||
handleSym = ident"duringHandle"
|
||||
duringSym = genSym(nskProc, "during")
|
||||
if exitBody.isNil:
|
||||
quote do:
|
||||
proc `duringSym`(`turn`: Turn; `bindingsSym`: Value; `handleSym`: Handle): TurnAction =
|
||||
`varSection`
|
||||
if fromPreserves(`valuesSym`, `bindingsSym`):
|
||||
`publishBody`
|
||||
else:
|
||||
error("unhandled event handler", handler)
|
||||
quote do:
|
||||
proc `duringSym`(`turn`: Turn; `bindingsSym`: Value; `handleSym`: Handle): TurnAction =
|
||||
`varSection`
|
||||
if fromPreserves(`valuesSym`, `bindingsSym`):
|
||||
`publishBody`
|
||||
proc action(`turn`: Turn) =
|
||||
`exitBody`
|
||||
result = action
|
||||
|
||||
proc onEvent(event: EventKind, pattern, handler: NimNode): NimNode =
|
||||
macro onPublish*(turn: untyped; ds: Cap; pattern: Pattern; handler: untyped) =
|
||||
## Call `handler` when an assertion matching `pattern` is published at `ds`.
|
||||
let
|
||||
handler = wrapHandler(pattern, handler)
|
||||
handlerSym = handler[0]
|
||||
argCount = argumentCount(handler)
|
||||
handlerProc = wrapPublishHandler(turn, handler)
|
||||
handlerSym = handlerProc[0]
|
||||
result = quote do:
|
||||
mixin getCurrentFacet
|
||||
getCurrentFacet().addEndpoint do (facet: Facet) -> EndpointSpec:
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
`handler`
|
||||
let a = `pattern`
|
||||
result.assertion = observe(a)
|
||||
result.analysis = some(analyzeAssertion(a))
|
||||
result.callback = wrap(facet, EventKind(`event`), `handlerSym`)
|
||||
if `argCount` != 0 and `pattern`.analyse.capturePaths.len != `argCount`:
|
||||
raiseAssert($`pattern`.analyse.capturePaths.len & " values captured but handler has " & $`argCount` & " arguments - " & $`pattern`)
|
||||
`handlerProc`
|
||||
discard observe(`turn`, `ds`, `pattern`, ClosureEntity(publishImpl: `handlerSym`))
|
||||
|
||||
macro onAsserted*(pattern: Preserve; handler: untyped) =
|
||||
onEvent(addedEvent, pattern, handler)
|
||||
macro onMessage*(turn: untyped; ds: Cap; pattern: Pattern; handler: untyped) =
|
||||
## Call `handler` when an message matching `pattern` is broadcasted at `ds`.
|
||||
let
|
||||
argCount = argumentCount(handler)
|
||||
handlerProc = wrapMessageHandler(turn, handler)
|
||||
handlerSym = handlerProc[0]
|
||||
result = quote do:
|
||||
if `argCount` != 0 and `pattern`.analyse.capturePaths.len != `argCount`:
|
||||
raiseAssert($`pattern`.analyse.capturePaths.len & " values captured but handler has " & $`argCount` & " arguments - " & $`pattern`)
|
||||
`handlerProc`
|
||||
discard observe(`turn`, `ds`, `pattern`, ClosureEntity(messageImpl: `handlerSym`))
|
||||
|
||||
macro onRetracted*(pattern: Preserve; handler: untyped) =
|
||||
onEvent(removedEvent, pattern, handler)
|
||||
macro during*(turn: untyped; ds: Cap; pattern: Pattern; publishBody, retractBody: untyped) =
|
||||
## Call `publishBody` when an assertion matching `pattern` is published to `ds` and
|
||||
## call `retractBody` on retraction. Assertions that match `pattern` but are not
|
||||
## convertable to the arguments of `publishBody` are silently discarded.
|
||||
##
|
||||
## The following symbols are injected into the scope of both bodies:
|
||||
## - `bindings` - raw Preserves sequence that matched `pattern`
|
||||
## - `duringHandle` - dataspace handle of the assertion that triggered `publishBody`
|
||||
let
|
||||
argCount = argumentCount(publishBody)
|
||||
callbackProc = wrapDuringHandler(turn, publishBody, retractBody)
|
||||
callbackSym = callbackProc[0]
|
||||
result = quote do:
|
||||
if `argCount` != 0 and `pattern`.analyse.capturePaths.len != `argCount`:
|
||||
raiseAssert($`pattern`.analyse.capturePaths.len & " values captured but handler has " & $`argCount` & " arguments - " & $`pattern`)
|
||||
`callbackProc`
|
||||
discard observe(`turn`, `ds`, `pattern`, during(`callbackSym`))
|
||||
|
||||
macro onMessage*(pattern: Preserve; doHandler: untyped) =
|
||||
onEvent(messageEvent, pattern, doHandler)
|
||||
macro during*(turn: untyped; ds: Cap; pattern: Pattern; publishBody: untyped) =
|
||||
## Variant of `during` without a retract body.
|
||||
let
|
||||
`argCount` = argumentCount(publishBody)
|
||||
callbackProc = wrapDuringHandler(turn, publishBody, nil)
|
||||
callbackSym = callbackProc[0]
|
||||
result = quote do:
|
||||
if `argCount` != 0 and `pattern`.analyse.capturePaths.len != `argCount`:
|
||||
raiseAssert($`pattern`.analyse.capturePaths.len & " values captured but handler has " & $`argCount` & " arguments - " & $`pattern`)
|
||||
`callbackProc`
|
||||
discard observe(`turn`, `ds`, `pattern`, during(`callbackSym`))
|
||||
|
||||
template onStart*(body: untyped): untyped =
|
||||
mixin getCurrentFacet
|
||||
getCurrentFacet().addStartScript do (facet: Facet):
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
body
|
||||
|
||||
template onStop*(body: untyped): untyped =
|
||||
mixin getCurrentFacet
|
||||
getCurrentFacet().addStopScript do (facet: Facet):
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
body
|
||||
|
||||
template publish*(a: Preserve): untyped =
|
||||
mixin getCurrentFacet
|
||||
getCurrentFacet().addEndpoint do (_: Facet) -> EndpointSpec:
|
||||
result.assertion = a
|
||||
|
||||
template field*(F: untyped; T: typedesc; initial: T): untyped =
|
||||
## Declare a field. The identifier `F` shall be a value with
|
||||
## `get` and `set` procs.
|
||||
mixin getCurrentFacet
|
||||
declareField(getCurrentFacet(), F, T, initial)
|
||||
# use the template defined in dataspaces
|
||||
|
||||
template react*(body: untyped): untyped =
|
||||
mixin getCurrentFacet
|
||||
addChildFacet(getCurrentFacet()) do (facet: Facet):
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
body
|
||||
|
||||
template stop*(body: untyped): untyped =
|
||||
mixin getCurrentFacet
|
||||
stop(getCurrentFacet()) do (facet: Facet):
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
body
|
||||
|
||||
template stop*(): untyped =
|
||||
mixin getCurrentFacet
|
||||
stop(getCurrentFacet())
|
||||
|
||||
template during*(pattern: Preserve; handler: untyped) =
|
||||
onAsserted(pattern):
|
||||
react:
|
||||
onAsserted(pattern, handler)
|
||||
onRetracted(pattern): stop()
|
||||
|
||||
template spawn*(name: string; spawnBody: untyped): untyped =
|
||||
mixin getCurrentFacet
|
||||
spawn(getCurrentFacet(), name) do (spawnFacet: Facet):
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = spawnFacet
|
||||
spawnBody
|
||||
|
||||
template withFacet*(f: Facet; body: untyped): untyped =
|
||||
## Execute a Syndicate ``body`` using the ``Facet`` at ``f``.
|
||||
runnableExamples:
|
||||
import preserves, preserves/records
|
||||
type Foo {.record: "foo".} = ref object
|
||||
facet: Facet
|
||||
i: int
|
||||
proc incAndAssert(foo: Foo) =
|
||||
inc(foo.i)
|
||||
withFacet foo.facet:
|
||||
react: assert: foo
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = f
|
||||
body
|
||||
|
||||
template syndicate*(ident, dataspaceBody: untyped): untyped =
|
||||
proc `ident`*(facet: Facet) =
|
||||
proc getCurrentFacet(): Facet {.inject, used.} = facet
|
||||
dataspaceBody
|
||||
proc `ident`*(name = ""): Future[void] =
|
||||
bootModule(name, `ident`)
|
||||
|
||||
type BootProc* = proc (facet: Facet) {.gcsafe.}
|
||||
|
||||
template boot*(module: BootProc) =
|
||||
mixin getCurrentFacet
|
||||
module(getCurrentFacet())
|
||||
|
||||
macro `?`*(x: untyped): untyped =
|
||||
## Sugar for generating Syndicate patterns.
|
||||
## `?_` is a pattern that matches but discards arbitrary
|
||||
## values and `?` combined with any other identifier is
|
||||
## a match and capture.
|
||||
if eqIdent(x, "_"):
|
||||
quote: toPreserve(Discard())
|
||||
else:
|
||||
quote: toPreserve(Capture())
|
||||
when defined(solo5):
|
||||
echo """
|
||||
______
|
||||
/ \_\
|
||||
/ ,__/ \ ____ __
|
||||
/\__/ \, \ _______ ______ ____/ /_/________ / /____
|
||||
\/ \__/ / / ___/ / / / __ \/ __ / / ___/ __ \/ __/ _ \
|
||||
\ ' \__/ _\_ \/ /_/ / / / / /_/ / / /__/ /_/ / /_/ __/
|
||||
\____/_/ /____/\__, /_/ /_/\____/_/\___/\__/_/\__/\___/
|
||||
/____/
|
||||
"""
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
include_rules
|
||||
NIM_FLAGS += --path:$(TUP_CWD)/..
|
||||
: foreach *.nim |> !nim_check |>
|
||||
: patterns.nim |> !nim_bin |>
|
|
@ -0,0 +1,812 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[assertions, deques, hashes, monotimes, options, sets, tables, times]
|
||||
import cps
|
||||
|
||||
import preserves
|
||||
import ../syndicate/protocols/[protocol, sturdy]
|
||||
import ../syndicate/protocols/trace
|
||||
|
||||
when defined(solo5):
|
||||
import solo5_dispatcher
|
||||
else:
|
||||
import pkg/sys/ioqueue
|
||||
|
||||
const tracing = defined(traceSyndicate)
|
||||
|
||||
when tracing:
|
||||
import std/streams
|
||||
when not defined(solo5):
|
||||
from std/os import getEnv
|
||||
|
||||
export Handle
|
||||
|
||||
type
|
||||
Oid = sturdy.Oid
|
||||
Caveat = sturdy.Caveat
|
||||
Attenuation = seq[Caveat]
|
||||
Rewrite = sturdy.Rewrite
|
||||
|
||||
AssertionRef* = ref object
|
||||
value*: Value
|
||||
# if the Enity methods take a Value object then the generated
|
||||
# C code has "redefinition of struct" problems when orc is enabled
|
||||
|
||||
Entity* = ref object of RootObj
|
||||
facet*: Facet
|
||||
oid*: Oid # oid is how Entities are identified over the wire
|
||||
|
||||
Cap* {.preservesEmbedded.} = ref object of EmbeddedObj
|
||||
target*: Entity
|
||||
relay*: Facet
|
||||
# Entity has facet but a Cap is also scoped to a relay Facet
|
||||
attenuation*: Attenuation
|
||||
|
||||
Ref* {.deprecated: "Ref was renamed to Cap".} = Cap
|
||||
|
||||
OutboundAssertion = ref object
|
||||
handle: Handle
|
||||
peer: Cap
|
||||
established: bool
|
||||
OutboundTable = Table[Handle, OutboundAssertion]
|
||||
|
||||
Actor* = ref object
|
||||
next: Actor
|
||||
name: string
|
||||
handleAllocator: ref Handle
|
||||
# a fresh actor gets a new ref Handle and
|
||||
# all actors spawned from it get the same ref.
|
||||
root: Facet
|
||||
exitReason: ref Exception
|
||||
exitHooks: seq[TurnAction]
|
||||
id: ActorId
|
||||
facetIdAllocator: uint
|
||||
exiting, exited: bool
|
||||
|
||||
TurnAction* = proc (t: Turn) {.closure.}
|
||||
|
||||
Turn* = var TurnRef
|
||||
TurnRef* = ref object
|
||||
facet: Facet # active facet that may change during a turn
|
||||
work: Deque[tuple[facet: Facet, act: TurnAction]]
|
||||
effects: Table[Actor, TurnRef]
|
||||
when tracing:
|
||||
desc: TurnDescription
|
||||
|
||||
Facet* = ref FacetObj
|
||||
FacetObj = object
|
||||
actor*: Actor
|
||||
parent: Facet
|
||||
children: HashSet[Facet]
|
||||
outbound: OutboundTable
|
||||
shutdownActions: seq[TurnAction]
|
||||
inertCheckPreventers: int
|
||||
id: FacetId
|
||||
isAlive: bool
|
||||
|
||||
var turnQueue {.threadvar.}: Deque[TurnRef]
|
||||
|
||||
when tracing:
|
||||
when defined(solo5):
|
||||
proc traceActivation(actor: Actor; act: ActorActivation) =
|
||||
discard #[
|
||||
echo TraceEntry(
|
||||
timestamp: getTime().toUnixFloat(),
|
||||
actor: initRecord("named", actor.name.toPreserves),
|
||||
item: act,
|
||||
).toPreserves
|
||||
]#
|
||||
else:
|
||||
proc openTraceStream: FileStream =
|
||||
let path = getEnv("SYNDICATE_TRACE_FILE")
|
||||
case path
|
||||
of "": stderr.writeLine "$SYNDICATE_TRACE_FILE unset"
|
||||
of "-": result = newFileStream(stderr)
|
||||
else: result = openFileStream(path, fmWrite)
|
||||
|
||||
let traceStream = openTraceStream()
|
||||
|
||||
proc traceActivation(actor: Actor; act: ActorActivation) =
|
||||
if not traceStream.isNil:
|
||||
var entry = TraceEntry(
|
||||
timestamp: getTime().toUnixFloat(),
|
||||
actor: initRecord("named", actor.name.toPreserves),
|
||||
item: act)
|
||||
traceStream.write(entry.toPreserves)
|
||||
traceStream.flush()
|
||||
|
||||
var turnIdAllocator: uint
|
||||
|
||||
proc nextTurnId(): TurnId =
|
||||
inc(turnIdAllocator)
|
||||
turnIdAllocator.toPreserves
|
||||
|
||||
proc path(facet: Facet): seq[FacetId] =
|
||||
var f = facet
|
||||
while not f.isNil:
|
||||
result.add f.id.toPreserves
|
||||
f = f.parent
|
||||
|
||||
proc initEnqueue(turn: Turn; cap: Cap): ActionDescription =
|
||||
result = ActionDescription(orKind: ActionDescriptionKind.enqueue)
|
||||
result.enqueue.event.target.actor = turn.facet.actor.id.toPreserves
|
||||
result.enqueue.event.target.facet = turn.facet.id.toPreserves
|
||||
result.enqueue.event.target.oid = cap.target.oid.toPreserves
|
||||
|
||||
proc toDequeue(act: sink ActionDescription): ActionDescription =
|
||||
result = ActionDescription(orKind: ActionDescriptionKind.dequeue)
|
||||
result.dequeue.event = move act.enqueue.event
|
||||
|
||||
proc toTraceTarget(cap: Cap): Target =
|
||||
assert not cap.target.isNil
|
||||
assert not cap.target.facet.isNil
|
||||
result.actor = cap.target.facet.actor.id
|
||||
result.facet = cap.target.facet.id
|
||||
result.oid = cap.target.oid.toPreserves
|
||||
|
||||
converter toActor(f: Facet): Actor = f.actor
|
||||
converter toActor(t: Turn): Actor = t.facet.actor
|
||||
converter toFacet(a: Actor): Facet = a.root
|
||||
converter toFacet(t: Turn): Facet = t.facet
|
||||
|
||||
using
|
||||
actor: Actor
|
||||
facet: Facet
|
||||
turn: Turn
|
||||
action: TurnAction
|
||||
|
||||
proc labels(f: Facet): string =
|
||||
assert not f.isNil
|
||||
assert not f.actor.isNil
|
||||
result.add f.actor.name
|
||||
proc catLabels(f: Facet; labels: var string) =
|
||||
if not f.parent.isNil:
|
||||
catLabels(f.parent, labels)
|
||||
labels.add ':'
|
||||
labels.add $f.id
|
||||
catLabels(f, result)
|
||||
|
||||
proc `$`*(f: Facet): string =
|
||||
"<Facet:" & f.labels & ">"
|
||||
|
||||
proc `$`*(actor: Actor): string =
|
||||
"<Actor:" & actor.name & ">" # TODO: ambigous
|
||||
|
||||
when tracing:
|
||||
|
||||
proc `$`*(r: Cap): string =
|
||||
"<Ref:" & r.relay.labels & ">"
|
||||
|
||||
proc `$`*(t: Turn): string =
|
||||
"<Turn:" & $t.desc.id & ">"
|
||||
|
||||
proc attenuate*(r: Cap; a: Attenuation): Cap =
|
||||
if a.len == 0: result = r
|
||||
else: result = Cap(
|
||||
target: r.target,
|
||||
relay: r.relay,
|
||||
attenuation: a & r.attenuation)
|
||||
|
||||
proc hash*(actor): Hash =
|
||||
result = actor[].unsafeAddr.hash
|
||||
|
||||
proc hash*(facet): Hash =
|
||||
facet[].unsafeAddr.hash
|
||||
|
||||
proc hash*(r: Cap): Hash = !$(r.relay.hash !& r.target.unsafeAddr.hash)
|
||||
|
||||
proc actor*(turn): Actor = turn.facet.actor
|
||||
|
||||
proc nextHandle(facet: Facet): Handle =
|
||||
result = succ(facet.actor.handleAllocator[])
|
||||
facet.actor.handleAllocator[] = result
|
||||
|
||||
template recallFacet(turn: Turn; body: untyped): untyped =
|
||||
let facet = turn.facet
|
||||
block:
|
||||
body
|
||||
assert facet.actor == turn.facet.actor
|
||||
turn.facet = facet
|
||||
|
||||
proc queueWork*(turn: Turn; facet: Facet; act: TurnAction) =
|
||||
assert not facet.isNil
|
||||
turn.work.addLast((facet, act,))
|
||||
|
||||
proc queueTurn*(facet: Facet; act: TurnAction) =
|
||||
var turn = TurnRef(facet: facet)
|
||||
assert not facet.isNil
|
||||
turn.work.addLast((facet, act,))
|
||||
when tracing:
|
||||
turn.desc.id = nextTurnId()
|
||||
turnQueue.addLast(turn)
|
||||
|
||||
proc queueTurn*(prev: Turn; facet: Facet; act: TurnAction) =
|
||||
var next = TurnRef(facet: facet)
|
||||
assert not facet.isNil
|
||||
next.work.addLast((facet, act,))
|
||||
when tracing:
|
||||
next.desc.id = nextTurnId()
|
||||
next.desc.cause = TurnCause(orKind: TurnCauseKind.turn)
|
||||
next.desc.cause.turn.id = prev.desc.id
|
||||
turnQueue.addLast(next)
|
||||
|
||||
proc run*(facet: Facet; action: TurnAction) = queueTurn(facet, action)
|
||||
## Alias to queueTurn_.
|
||||
|
||||
proc facet*(turn: Turn): Facet = turn.facet
|
||||
|
||||
proc queueEffect*(turn: Turn; target: Facet; act: TurnAction) =
|
||||
let fremd = target.actor
|
||||
if fremd == turn.facet.actor:
|
||||
turn.work.addLast((target, act,))
|
||||
else:
|
||||
var fremdTurn = turn.effects.getOrDefault(fremd)
|
||||
if fremdTurn.isNil:
|
||||
fremdTurn = TurnRef(facet: target)
|
||||
turn.effects[fremd] = fremdTurn
|
||||
when tracing:
|
||||
fremdTurn.desc.id = nextTurnId()
|
||||
fremdTurn.desc.cause = TurnCause(orKind: TurnCauseKind.turn)
|
||||
fremdTurn.desc.cause.turn.id = turn.desc.id
|
||||
fremdTurn.work.addLast((target, act,))
|
||||
|
||||
type Bindings = Table[Value, Value]
|
||||
|
||||
proc match(bindings: var Bindings; p: Pattern; v: Value): bool =
|
||||
case p.orKind
|
||||
of PatternKind.Pdiscard: result = true
|
||||
of PatternKind.Patom:
|
||||
result = case p.patom
|
||||
of PAtom.Boolean: v.isBoolean
|
||||
of PAtom.Double: v.isFloat
|
||||
of PAtom.Signedinteger: v.isInteger
|
||||
of PAtom.String: v.isString
|
||||
of PAtom.Bytestring: v.isByteString
|
||||
of PAtom.Symbol: v.isSymbol
|
||||
of PatternKind.Pembedded:
|
||||
result = v.isEmbedded
|
||||
of PatternKind.Pbind:
|
||||
if match(bindings, p.pbind.pattern, v):
|
||||
bindings[p.pbind.pattern.toPreserves] = v
|
||||
result = true
|
||||
of PatternKind.Pand:
|
||||
for pp in p.pand.patterns:
|
||||
result = match(bindings, pp, v)
|
||||
if not result: break
|
||||
of PatternKind.Pnot:
|
||||
var b: Bindings
|
||||
result = not match(b, p.pnot.pattern, v)
|
||||
of PatternKind.Lit:
|
||||
result = p.lit.value == v
|
||||
of PatternKind.PCompound:
|
||||
case p.pcompound.orKind
|
||||
of PCompoundKind.rec:
|
||||
if v.isRecord and
|
||||
p.pcompound.rec.label == v.label and
|
||||
p.pcompound.rec.fields.len == v.arity:
|
||||
result = true
|
||||
for i, pp in p.pcompound.rec.fields:
|
||||
if not match(bindings, pp, v[i]):
|
||||
result = false
|
||||
break
|
||||
of PCompoundKind.arr:
|
||||
if v.isSequence and p.pcompound.arr.items.len == v.sequence.len:
|
||||
result = true
|
||||
for i, pp in p.pcompound.arr.items:
|
||||
if not match(bindings, pp, v[i]):
|
||||
result = false
|
||||
break
|
||||
of PCompoundKind.dict:
|
||||
if v.isDictionary:
|
||||
result = true
|
||||
for key, pp in p.pcompound.dict.entries:
|
||||
let vv = step(v, key)
|
||||
if vv.isNone or not match(bindings, pp, get vv):
|
||||
result = true
|
||||
break
|
||||
|
||||
proc match(p: Pattern; v: Value): Option[Bindings] =
|
||||
var b: Bindings
|
||||
if match(b, p, v):
|
||||
result = some b
|
||||
|
||||
proc instantiate(t: Template; bindings: Bindings): Value =
|
||||
case t.orKind
|
||||
of TemplateKind.Tattenuate:
|
||||
let v = instantiate(t.tattenuate.template, bindings)
|
||||
let cap = v.unembed(Cap)
|
||||
if cap.isNone:
|
||||
raise newException(ValueError, "Attempt to attenuate non-capability")
|
||||
result = attenuate(get cap, t.tattenuate.attenuation).embed
|
||||
of TemplateKind.TRef:
|
||||
let n = $t.tref.binding.int
|
||||
try: result = bindings[n.toPreserves]
|
||||
except KeyError:
|
||||
raise newException(ValueError, "unbound reference: " & n)
|
||||
of TemplateKind.Lit:
|
||||
result = t.lit.value
|
||||
of TemplateKind.Tcompound:
|
||||
case t.tcompound.orKind
|
||||
of TCompoundKind.rec:
|
||||
result = initRecord(t.tcompound.rec.label, t.tcompound.rec.fields.len)
|
||||
for i, tt in t.tcompound.rec.fields:
|
||||
result[i] = instantiate(tt, bindings)
|
||||
of TCompoundKind.arr:
|
||||
result = initSequence(t.tcompound.arr.items.len)
|
||||
for i, tt in t.tcompound.arr.items:
|
||||
result[i] = instantiate(tt, bindings)
|
||||
of TCompoundKind.dict:
|
||||
result = initDictionary()
|
||||
for key, tt in t.tcompound.dict.entries:
|
||||
result[key] = instantiate(tt, bindings)
|
||||
|
||||
proc rewrite(r: Rewrite; v: Value): Value =
|
||||
let bindings = match(r.pattern, v)
|
||||
if bindings.isSome:
|
||||
result = instantiate(r.template, get bindings)
|
||||
|
||||
proc examineAlternatives(cav: Caveat; v: Value): Value =
|
||||
case cav.orKind
|
||||
of CaveatKind.Rewrite:
|
||||
result = rewrite(cav.rewrite, v)
|
||||
of CaveatKind.Alts:
|
||||
for r in cav.alts.alternatives:
|
||||
result = rewrite(r, v)
|
||||
if not result.isFalse: break
|
||||
of CaveatKind.Reject: discard
|
||||
of CaveatKind.unknown: discard
|
||||
|
||||
proc runRewrites*(a: Attenuation; v: Value): Value =
|
||||
result = v
|
||||
for stage in a:
|
||||
result = examineAlternatives(stage, result)
|
||||
if result.isFalse: break
|
||||
|
||||
method publish*(e: Entity; turn: Turn; v: AssertionRef; h: Handle) {.base.} = discard
|
||||
|
||||
proc publish(turn: Turn; cap: Cap; v: Value; h: Handle) =
|
||||
var a = runRewrites(cap.attenuation, v)
|
||||
if not a.isFalse:
|
||||
let e = OutboundAssertion(handle: h, peer: cap)
|
||||
turn.facet.outbound[h] = e
|
||||
when tracing:
|
||||
var act = ActionDescription(orKind: ActionDescriptionKind.enqueue)
|
||||
act.enqueue.event.target.actor = turn.facet.actor.id.toPreserves
|
||||
act.enqueue.event.target.facet = turn.facet.id.toPreserves
|
||||
act.enqueue.event.target.oid = cap.target.oid.toPreserves
|
||||
act.enqueue.event.detail = trace.TurnEvent(orKind: trace.TurnEventKind.assert)
|
||||
act.enqueue.event.detail.assert.assertion.value.value =
|
||||
mapEmbeds(v) do (cap: Value) -> Value: discard
|
||||
act.enqueue.event.detail.assert.handle = h
|
||||
turn.desc.actions.add act
|
||||
queueEffect(turn, cap.relay) do (turn: Turn):
|
||||
e.established = true
|
||||
when tracing:
|
||||
turn.desc.actions.add act.toDequeue
|
||||
publish(cap.target, turn, AssertionRef(value: a), e.handle)
|
||||
|
||||
proc publish*(turn: Turn; r: Cap; a: Value): Handle {.discardable.} =
|
||||
result = turn.facet.nextHandle()
|
||||
publish(turn, r, a, result)
|
||||
|
||||
proc publish*[T](turn: Turn; r: Cap; a: T): Handle {.discardable.} =
|
||||
publish(turn, r, a.toPreserves)
|
||||
|
||||
method retract*(e: Entity; turn: Turn; h: Handle) {.base.} = discard
|
||||
|
||||
proc retract(turn: Turn; e: OutboundAssertion) =
|
||||
when tracing:
|
||||
var act = initEnqueue(turn, e.peer)
|
||||
act.enqueue.event.detail = trace.TurnEvent(orKind: TurnEventKind.retract)
|
||||
act.enqueue.event.detail.retract.handle = e.handle
|
||||
turn.desc.actions.add act
|
||||
queueEffect(turn, e.peer.relay) do (turn: Turn):
|
||||
when tracing:
|
||||
turn.desc.actions.add act.toDequeue
|
||||
if e.established:
|
||||
e.established = false
|
||||
e.peer.target.retract(turn, e.handle)
|
||||
|
||||
proc retract*(turn: Turn; h: Handle) =
|
||||
var e: OutboundAssertion
|
||||
if turn.facet.outbound.pop(h, e):
|
||||
turn.retract(e)
|
||||
|
||||
method message*(e: Entity; turn: Turn; v: AssertionRef) {.base.} = discard
|
||||
|
||||
proc message*(turn: Turn; r: Cap; v: Value) =
|
||||
var a = runRewrites(r.attenuation, v)
|
||||
if not a.isFalse:
|
||||
when tracing:
|
||||
var act = initEnqueue(turn, r)
|
||||
act.enqueue.event.detail = trace.TurnEvent(orKind: TurnEventKind.message)
|
||||
act.enqueue.event.detail.message.body.value.value =
|
||||
mapEmbeds(a) do (cap: Value) -> Value: discard
|
||||
turn.desc.actions.add act
|
||||
queueEffect(turn, r.relay) do (turn: Turn):
|
||||
when tracing:
|
||||
turn.desc.actions.add act.toDequeue
|
||||
r.target.message(turn, AssertionRef(value: a))
|
||||
|
||||
proc message*[T](turn: Turn; r: Cap; v: T) =
|
||||
message(turn, r, v.toPreserves)
|
||||
|
||||
method sync*(e: Entity; turn: Turn; peer: Cap) {.base.} =
|
||||
queueTurn(e.facet) do (turn: Turn):
|
||||
message(turn, peer, true.toPreserves)
|
||||
# complete sync on a later turn
|
||||
|
||||
proc sync*(turn: Turn; r, peer: Cap) =
|
||||
when tracing:
|
||||
var act = initEnqueue(turn, peer)
|
||||
act.enqueue.event.detail = trace.TurnEvent(orKind: TurnEventKind.sync)
|
||||
act.enqueue.event.detail.sync.peer = peer.toTraceTarget
|
||||
turn.desc.actions.add act
|
||||
queueEffect(turn, r.relay) do (turn: Turn):
|
||||
when tracing:
|
||||
turn.desc.actions.add act.toDequeue
|
||||
r.target.sync(turn, peer)
|
||||
|
||||
proc replace*[T](turn: Turn; cap: Cap; h: Handle; v: T): Handle =
|
||||
result = publish(turn, cap, v)
|
||||
if h != default(Handle):
|
||||
retract(turn, h)
|
||||
|
||||
proc replace*[T](turn: Turn; cap: Cap; h: var Handle; v: T): Handle {.discardable.} =
|
||||
var old = h
|
||||
h = publish(turn, cap, v)
|
||||
if old != default(Handle):
|
||||
retract(turn, old)
|
||||
h
|
||||
|
||||
proc stop*(turn: Turn)
|
||||
|
||||
proc newFacet(actor; parent: Facet; initialAssertions: OutboundTable): Facet =
|
||||
inc actor.facetIdAllocator
|
||||
result = Facet(
|
||||
id: actor.facetIdAllocator.toPreserves,
|
||||
actor: actor,
|
||||
parent: parent,
|
||||
outbound: initialAssertions,
|
||||
isAlive: true)
|
||||
if not parent.isNil: parent.children.incl result
|
||||
|
||||
proc newFacet(actor; parent: Facet): Facet =
|
||||
var initialAssertions: OutboundTable
|
||||
newFacet(actor, parent, initialAssertions)
|
||||
|
||||
proc isInert(facet): bool =
|
||||
let
|
||||
noKids = facet.children.len == 0
|
||||
noOutboundHandles = facet.outbound.len == 0
|
||||
isRootFacet = facet.parent.isNil
|
||||
noInertCheckPreventers = facet.inertCheckPreventers == 0
|
||||
result = noKids and (noOutboundHandles or isRootFacet) and noInertCheckPreventers
|
||||
|
||||
proc preventInertCheck*(turn: Turn) =
|
||||
inc turn.facet.inertCheckPreventers
|
||||
|
||||
proc terminateActor(turn; reason: ref Exception)
|
||||
|
||||
proc terminateFacetOrderly(turn: Turn) =
|
||||
let facet = turn.facet
|
||||
if facet.isAlive:
|
||||
facet.isAlive = false
|
||||
var i = 0
|
||||
while i < facet.shutdownActions.len:
|
||||
facet.shutdownActions[i](turn)
|
||||
inc i
|
||||
setLen facet.shutdownActions, 0
|
||||
for e in facet.outbound.values:
|
||||
retract(turn, e)
|
||||
clear facet.outbound
|
||||
|
||||
proc inertCheck(turn: Turn) =
|
||||
if (not turn.facet.parent.isNil and
|
||||
(not turn.facet.parent.isAlive)) or
|
||||
turn.facet.isInert:
|
||||
when tracing:
|
||||
var act = ActionDescription(orKind: ActionDescriptionKind.facetStop)
|
||||
act.facetstop.path = turn.facet.path
|
||||
act.facetstop.reason = FacetStopReason.inert
|
||||
turn.desc.actions.add act
|
||||
stop(turn)
|
||||
|
||||
proc terminateFacet(turn: Turn) =
|
||||
let facet = turn.facet
|
||||
for child in facet.children:
|
||||
queueWork(turn, child, terminateFacetOrderly)
|
||||
# terminate all children
|
||||
facet.children.clear()
|
||||
# detach all children
|
||||
queueWork(turn, facet, terminateFacetOrderly)
|
||||
# self-termination
|
||||
|
||||
proc stopIfInertAfter(action: TurnAction): TurnAction =
|
||||
proc work(turn: Turn) =
|
||||
queueEffect(turn, turn.facet, inertCheck)
|
||||
action(turn)
|
||||
work
|
||||
|
||||
proc newFacet(turn: Turn): Facet = newFacet(turn.facet.actor, turn.facet)
|
||||
|
||||
proc inFacet*(turn: Turn; bootProc: TurnAction): Facet {.discardable.} =
|
||||
result = newFacet(turn)
|
||||
recallFacet turn:
|
||||
turn.facet = result
|
||||
when tracing:
|
||||
var act = ActionDescription(orKind: ActionDescriptionKind.facetstart)
|
||||
act.facetstart.path.add result.path
|
||||
turn.desc.actions.add act
|
||||
stopIfInertAfter(bootProc)(turn)
|
||||
|
||||
proc newActor(name: string; parent: Facet): Actor =
|
||||
result = Actor(
|
||||
name: name,
|
||||
id: name.toPreserves,
|
||||
)
|
||||
if parent.isNil:
|
||||
new result.handleAllocator
|
||||
else:
|
||||
result.handleAllocator = parent.actor.handleAllocator
|
||||
result.root = newFacet(result, parent)
|
||||
when tracing:
|
||||
var act = ActorActivation(orKind: ActorActivationKind.start)
|
||||
act.start.actorName = Name(orKind: NameKind.named)
|
||||
act.start.actorName.named.name = name.toPreserves
|
||||
traceActivation(result, act)
|
||||
|
||||
proc run(actor: Actor; bootProc: TurnAction; initialAssertions: OutboundTable) =
|
||||
queueTurn(newFacet(actor, actor.root, initialAssertions), stopIfInertAfter(bootProc))
|
||||
|
||||
proc bootActor*(name: string; bootProc: TurnAction): Actor {.discardable.} =
|
||||
## Boot a top-level actor.
|
||||
result = newActor(name, nil)
|
||||
new result.handleAllocator
|
||||
var turn = TurnRef(facet: result.root)
|
||||
assert not result.root.isNil
|
||||
turn.work.addLast((result.root, bootProc,))
|
||||
when tracing:
|
||||
turn.desc.id = nextTurnId()
|
||||
turn.desc.cause = TurnCause(orKind: TurnCauseKind.external)
|
||||
turn.desc.cause.external.description = "bootActor".toPreserves
|
||||
turnQueue.addLast turn
|
||||
|
||||
proc spawnActor*(turn: Turn; name: string; bootProc: TurnAction; initialAssertions = initHashSet[Handle]()): Actor {.discardable.} =
|
||||
let actor = newActor(name, turn.facet)
|
||||
queueEffect(turn, actor.root) do (turn: Turn):
|
||||
var newOutBound: Table[Handle, OutboundAssertion]
|
||||
for key in initialAssertions:
|
||||
discard turn.facet.outbound.pop(key, newOutbound[key])
|
||||
when tracing:
|
||||
var act = ActionDescription(orKind: ActionDescriptionKind.spawn)
|
||||
act.spawn.id = actor.id.toPreserves
|
||||
turn.desc.actions.add act
|
||||
run(actor, bootProc, newOutBound)
|
||||
actor
|
||||
|
||||
proc spawn*(name: string; turn: Turn; bootProc: TurnAction; initialAssertions = initHashSet[Handle]()): Actor {.discardable.} =
|
||||
spawnActor(turn, name, bootProc, initialAssertions)
|
||||
|
||||
type StopOnRetract = ref object of Entity
|
||||
|
||||
method retract*(e: StopOnRetract; turn: Turn; h: Handle) =
|
||||
stop(turn)
|
||||
|
||||
proc halfLink(facet, other: Facet) =
|
||||
let h = facet.nextHandle()
|
||||
facet.outbound[h] = OutboundAssertion(
|
||||
handle: h,
|
||||
peer: Cap(relay: other, target: StopOnRetract(facet: facet)),
|
||||
established: true,
|
||||
)
|
||||
|
||||
proc linkActor*(turn: Turn; name: string; bootProc: TurnAction; initialAssertions = initHashSet[Handle]()): Actor {.discardable.} =
|
||||
result = spawnActor(turn, name, bootProc, initialAssertions)
|
||||
halfLink(turn.facet, result.root)
|
||||
halfLink(result.root, turn.facet)
|
||||
|
||||
var inertActor {.threadvar.}: Actor
|
||||
|
||||
proc newInertCap*(): Cap =
|
||||
if inertActor.isNil:
|
||||
inertActor = bootActor("inert") do (turn: Turn): turn.stop()
|
||||
Cap(relay: inertActor.root)
|
||||
|
||||
proc atExit*(actor; action) = actor.exitHooks.add action
|
||||
|
||||
proc terminateActor(turn; reason: ref Exception) =
|
||||
let actor = turn.actor
|
||||
if not actor.exiting:
|
||||
actor.exiting = true
|
||||
actor.exitReason = reason
|
||||
when tracing:
|
||||
var act = ActorActivation(orKind: ActorActivationKind.stop)
|
||||
if not reason.isNil:
|
||||
act.stop.status = ExitStatus(orKind: ExitStatusKind.Error)
|
||||
act.stop.status.error.message = reason.msg
|
||||
traceActivation(actor, act)
|
||||
while actor.exitHooks.len > 0:
|
||||
var hook = actor.exitHooks.pop()
|
||||
try: hook(turn)
|
||||
except CatchableError as err:
|
||||
if reason.isNil:
|
||||
terminateActor(turn, err)
|
||||
return
|
||||
proc finish(turn: Turn) =
|
||||
assert not actor.root.isNil, actor.name
|
||||
terminateFacet(turn)
|
||||
actor.root = nil
|
||||
actor.exited = true
|
||||
queueTurn(actor.root, finish)
|
||||
|
||||
proc terminateFacet*(facet; e: ref Exception) =
|
||||
run(facet.actor.root) do (turn: Turn):
|
||||
terminateActor(turn, e)
|
||||
|
||||
proc terminate*(turn: Turn; e: ref Exception) =
|
||||
terminateActor(turn, e)
|
||||
|
||||
proc stop*(turn: Turn, facet: Facet) =
|
||||
queueEffect(turn, facet) do (turn: Turn):
|
||||
when tracing:
|
||||
var act = ActionDescription(orKind: ActionDescriptionKind.facetStop)
|
||||
act.facetstop.path = facet.path
|
||||
act.facetstop.reason = FacetStopReason.explicitAction
|
||||
turn.desc.actions.add act
|
||||
terminateFacet(turn)
|
||||
|
||||
proc stop*(turn: Turn) =
|
||||
stop(turn, turn.facet)
|
||||
|
||||
proc stop*(facet: Facet) =
|
||||
run(facet, stop)
|
||||
|
||||
proc onStop*(facet: Facet; act: TurnAction) =
|
||||
## Add a `proc (turn: Turn)` action to `facet` to be called as it stops.
|
||||
add(facet.shutdownActions, act)
|
||||
|
||||
proc onStop*(turn: Turn; act: TurnAction) =
|
||||
onStop(turn.facet, act)
|
||||
|
||||
proc isAlive(actor): bool =
|
||||
not(actor.exited or actor.exiting)
|
||||
|
||||
proc stop*(actor: Actor) =
|
||||
if actor.isAlive:
|
||||
queueTurn(actor.root) do (turn: Turn):
|
||||
assert(not turn.facet.isNil)
|
||||
when tracing:
|
||||
var act = ActionDescription(orKind: ActionDescriptionKind.facetStop)
|
||||
act.facetstop.path = turn.facet.path
|
||||
act.facetstop.reason = FacetStopReason.actorStopping
|
||||
turn.desc.actions.add act
|
||||
stop(turn, turn.facet)
|
||||
|
||||
proc stopActor*(facet: Facet) =
|
||||
stop(facet.actor)
|
||||
|
||||
proc stopActor*(turn: Turn) =
|
||||
stop(turn, turn.facet.actor.root)
|
||||
|
||||
proc freshen*(turn: Turn, act: TurnAction) {.deprecated.} =
|
||||
run(turn.facet, act)
|
||||
|
||||
proc newCap*(relay: Facet; entity: Entity): Cap =
|
||||
## Create a new capability for `entity` via `relay`.
|
||||
# An Entity has an owning facet and a Cap does as well?
|
||||
if entity.facet.isNil: entity.facet = relay
|
||||
Cap(relay: relay, target: entity)
|
||||
|
||||
proc newCap*(turn; e: Entity): Cap =
|
||||
newCap(turn.facet, e)
|
||||
proc newCap*(e: Entity; turn): Cap =
|
||||
newCap(turn.facet, e)
|
||||
|
||||
type SyncContinuation {.final.} = ref object of Entity
|
||||
action: TurnAction
|
||||
|
||||
method message(entity: SyncContinuation; turn: Turn; v: AssertionRef) =
|
||||
entity.action(turn)
|
||||
|
||||
proc sync*(turn: Turn; refer: Cap; act: TurnAction) =
|
||||
sync(turn, refer, newCap(turn, SyncContinuation(action: act)))
|
||||
|
||||
proc running*(actor): bool =
|
||||
result = not actor.exited
|
||||
if not (result or actor.exitReason.isNil):
|
||||
raise actor.exitReason
|
||||
|
||||
proc facet*(actor): Facet = actor.root
|
||||
|
||||
proc run(turn: Turn) =
|
||||
while turn.work.len > 0:
|
||||
var (facet, act) = turn.work.popFirst()
|
||||
assert not act.isNil
|
||||
turn.facet = facet
|
||||
act(turn)
|
||||
when tracing:
|
||||
var act = ActorActivation(orKind: ActorActivationKind.turn)
|
||||
act.turn = move turn.desc
|
||||
traceActivation(turn.facet.actor, act)
|
||||
# TODO: catch exceptions here
|
||||
for eff in turn.effects.mvalues:
|
||||
assert not eff.facet.isNil
|
||||
turnQueue.addLast(move eff)
|
||||
turn.facet = nil # invalidate the turn
|
||||
|
||||
proc runPendingTurns* =
|
||||
while turnQueue.len > 0:
|
||||
var turn = turnQueue.popFirst()
|
||||
# TODO: check if actor is still valid
|
||||
try: run(turn)
|
||||
except CatchableError as err:
|
||||
terminateActor(turn, err)
|
||||
raise err
|
||||
|
||||
proc runOnce*(timeout = none(Duration)): bool {.discardable.} =
|
||||
## Run pending turns if there are any, otherwise
|
||||
## poll for external events and run any resulting turns.
|
||||
## Return true if any turns have been processed.
|
||||
if turnQueue.len == 0:
|
||||
when defined(solo5):
|
||||
discard solo5_dispatcher.runOnce(timeout)
|
||||
else:
|
||||
var ready: seq[Continuation]
|
||||
ioqueue.poll(ready, timeout)
|
||||
while ready.len > 0:
|
||||
discard trampoline:
|
||||
ready.pop()
|
||||
result = turnQueue.len > 0
|
||||
runPendingTurns()
|
||||
|
||||
proc run* =
|
||||
## Run actors to completion.
|
||||
when defined(solo5):
|
||||
while turnQueue.len > 0 or solo5_dispatcher.runOnce():
|
||||
runPendingTurns()
|
||||
else:
|
||||
var ready: seq[Continuation]
|
||||
while true:
|
||||
runPendingTurns()
|
||||
ioqueue.poll(ready)
|
||||
if ready.len == 0: break
|
||||
while ready.len > 0:
|
||||
discard trampoline:
|
||||
ready.pop()
|
||||
|
||||
proc runActor*(name: string; bootProc: TurnAction) =
|
||||
## Boot an actor `Actor` and churn ioqueue.
|
||||
let actor = bootActor(name, bootProc)
|
||||
if not actor.exitReason.isNil:
|
||||
raise actor.exitReason
|
||||
when defined(solo5):
|
||||
runPendingTurns()
|
||||
while (actor.isAlive and solo5_dispatcher.runOnce()) or turnQueue.len > 0:
|
||||
runPendingTurns()
|
||||
else:
|
||||
actors.run()
|
||||
if not actor.exitReason.isNil:
|
||||
raise actor.exitReason
|
||||
|
||||
type FacetGuard* = object
|
||||
facet: Facet
|
||||
|
||||
proc initGuard*(f: Facet): FacetGuard =
|
||||
result.facet = f
|
||||
inc result.facet.inertCheckPreventers
|
||||
|
||||
proc disarm*(g: var FacetGuard) =
|
||||
if not g.facet.isNil:
|
||||
assert g.facet.inertCheckPreventers > 0
|
||||
dec g.facet.inertCheckPreventers
|
||||
g.facet = nil
|
||||
|
||||
proc `=destroy`*(g: FacetGuard) =
|
||||
if not g.facet.isNil:
|
||||
dec g.facet.inertCheckPreventers
|
||||
|
||||
proc `=copy`*(dst: var FacetGuard, src: FacetGuard) =
|
||||
dst.facet = src.facet
|
||||
inc dst.facet.inertCheckPreventers
|
|
@ -1,29 +0,0 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2021 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/options
|
||||
import preserves
|
||||
|
||||
type
|
||||
Discard* {.record: "discard", pure.} = object
|
||||
discard
|
||||
|
||||
Capture* {.record: "capture", pure.} = object
|
||||
_: Discard
|
||||
|
||||
Observe* {.record: "observe", pure.} = object
|
||||
pattern: Preserve
|
||||
|
||||
proc observe*[T](x: T): Preserve =
|
||||
Observe(pattern: x.toPreserve).toPreserve
|
||||
|
||||
proc captureCount*(pattern: Preserve): int =
|
||||
if pattern.preserveTo(Capture).isSome:
|
||||
result = 1
|
||||
else:
|
||||
for e in pattern.items:
|
||||
result.inc captureCount(e)
|
||||
|
||||
when isMainModule:
|
||||
let a = observe(`?*`)
|
||||
assert($toPreserve(a) == "<capture <discard>>")
|
|
@ -4,7 +4,7 @@
|
|||
## An unordered association of items to counts.
|
||||
## An item count may be negative, unlike CountTable.
|
||||
|
||||
import tables
|
||||
import std/[assertions, tables]
|
||||
|
||||
type
|
||||
ChangeDescription* = enum
|
||||
|
@ -36,3 +36,15 @@ proc change*[T](bag: var Bag[T]; key: T; delta: int; clamp = false): ChangeDescr
|
|||
result = change(bag.mGetOrPut(key, 0), delta, clamp)
|
||||
if result in {cdAbsentToAbsent, cdPresentToAbsent}:
|
||||
bag.del(key)
|
||||
|
||||
iterator items*[T](bag: Bag[T]): T =
|
||||
for x in bag.keys: yield x
|
||||
|
||||
proc `$`*(bag: Bag): string =
|
||||
result.add '{'
|
||||
for x in bag.keys:
|
||||
if result.len > 1: result.add ' '
|
||||
result.add $x
|
||||
result.add '}'
|
||||
|
||||
export tables.contains, tables.del, tables.len
|
||||
|
|
|
@ -0,0 +1,46 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
runnableExamples:
|
||||
from std/unittest import check
|
||||
let sturdy = mint()
|
||||
check $sturdy == """<ref {oid: "syndicate" sig: #x"69ca300c1dbfa08fba692102dd82311a"}>"""
|
||||
|
||||
import
|
||||
std/[options, tables],
|
||||
nimcrypto/[blake2, hmac],
|
||||
preserves,
|
||||
./protocols/sturdy
|
||||
|
||||
export `$`
|
||||
|
||||
proc hmac(key, data: openarray[byte]): seq[byte] =
|
||||
result = newSeq[byte](32)
|
||||
var ctx: HMAC[blake2_256]
|
||||
ctx.init key
|
||||
ctx.update data
|
||||
discard ctx.finish result
|
||||
result.setLen 16
|
||||
|
||||
proc mint*(key: openarray[byte]; oid: Value): SturdyRef =
|
||||
result.parameters.oid = oid
|
||||
result.parameters.sig = hmac(key, oid.encode)
|
||||
|
||||
proc mint*(): SturdyRef =
|
||||
var key: array[16, byte]
|
||||
mint(key, "syndicate".toPreserves)
|
||||
|
||||
proc attenuate*(r: SturdyRef; caveats: seq[Caveat]): SturdyRef =
|
||||
if r.parameters.caveats.isSome:
|
||||
result.parameters.caveats = some(r.parameters.caveats.get & caveats.toPreserves)
|
||||
result.parameters.oid = r.parameters.oid
|
||||
result.parameters.sig = hmac(r.parameters.sig, caveats.toPreserves.encode)
|
||||
|
||||
proc validate*(key: openarray[byte]; sturdy: SturdyRef): bool =
|
||||
var sig = hmac(key, sturdy.parameters.oid.encode)
|
||||
if sturdy.parameters.caveats.isSome:
|
||||
for cav in sturdy.parameters.caveats.get:
|
||||
sig = hmac(sig, encode cav)
|
||||
result = (sig == sturdy.parameters.sig)
|
||||
|
||||
# mint utility moved to syndicate_utils/src/mintsturdyref.nim
|
|
@ -51,7 +51,7 @@ iterator observersOf[Sid, Oid](g: Graph[Sid, Oid]; oid: Oid): Sid =
|
|||
if g.edgesForward.hasKey(oid):
|
||||
for sid in g.edgesForward[oid]: yield sid
|
||||
|
||||
proc repairDamage*[Sid, Oid](g: var Graph[Sid, Oid]; repairNode: proc (sid: Sid) {.gcsafe.}) =
|
||||
proc repairDamage*[Sid, Oid](g: var Graph[Sid, Oid]; repairNode: proc (sid: Sid) {.closure.}) =
|
||||
var repairedThisRound: Set[Oid]
|
||||
while true:
|
||||
var workSet = move g.damagedNodes
|
||||
|
|
|
@ -1,577 +1,50 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2021 Emery Hemingway
|
||||
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import ./bags, ./dataflow, ./events, ./skeletons
|
||||
import std/[hashes, options, tables]
|
||||
import preserves
|
||||
import std/[asyncdispatch, deques, hashes, macros, options, sets, tables]
|
||||
import ./actors, ./protocols/dataspace, ./skeletons
|
||||
|
||||
export dataflow.defineObservableProperty
|
||||
export dataflow.recordObservation
|
||||
export dataflow.recordDamage
|
||||
|
||||
template generateIdType(T: untyped) =
|
||||
type T* = distinct Natural
|
||||
proc `==`*(x, y: T): bool {.borrow.}
|
||||
proc `$`*(id: T): string {.borrow.}
|
||||
|
||||
generateIdType(ActorId)
|
||||
generateIdType(FacetId)
|
||||
generateIdType(EndpointId)
|
||||
generateIdType(FieldId)
|
||||
from ./protocols/protocol import Handle
|
||||
|
||||
type
|
||||
Value* = Preserve
|
||||
Bag = bags.Bag[Value]
|
||||
Assertion = Value
|
||||
Observe = dataspace.Observe
|
||||
Turn = actors.Turn
|
||||
|
||||
Task[T] = proc (): T
|
||||
Script[T] = proc (facet: Facet): T
|
||||
ActivationScript* = Script[void]
|
||||
|
||||
ActionKind = enum
|
||||
patchAction, messageAction, spawnAction, quitAction, deferredTurnAction, activationAction
|
||||
|
||||
Action = object
|
||||
impl: proc (action: Action; ds: Dataspace; actor: Option[Actor]) {.gcsafe.}
|
||||
case kind: ActionKind
|
||||
of patchAction:
|
||||
changes: Bag
|
||||
else:
|
||||
discard
|
||||
|
||||
Priority = enum
|
||||
pQueryHigh = 0,
|
||||
pQuery,
|
||||
pQueryHandler,
|
||||
pNormal,
|
||||
pGC,
|
||||
pIdle,
|
||||
len
|
||||
|
||||
Actor = ref object
|
||||
id: ActorId
|
||||
name: string
|
||||
dataspace*: Dataspace
|
||||
rootFacet: ParentFacet
|
||||
pendingTasks: array[Priority.len, Deque[Task[void]]]
|
||||
pendingActions: seq[Action]
|
||||
adhocAssertions: Bag
|
||||
cleanupChanges: Bag
|
||||
parentId: ActorId
|
||||
|
||||
EndpointSpec* = tuple
|
||||
callback: HandlerCallback
|
||||
assertion: Value
|
||||
analysis: Option[Analysis]
|
||||
|
||||
Endpoint = ref object
|
||||
id: EndpointId
|
||||
facet: Facet
|
||||
updateProc: Script[EndpointSpec]
|
||||
spec: EndpointSpec
|
||||
|
||||
Field* = object of RootObj
|
||||
id*: FieldId
|
||||
Fields* = seq[Value]
|
||||
# TODO: compile-time tuples
|
||||
|
||||
Turn = object
|
||||
actions: seq[Action]
|
||||
actor: Option[Actor]
|
||||
|
||||
Dataspace* = ref object
|
||||
ground*: Ground
|
||||
Dataspace {.final.} = ref object of Entity
|
||||
index: Index
|
||||
dataflow*: Graph[Endpoint, FieldId]
|
||||
runnable: seq[Actor]
|
||||
pendingTurns: seq[Turn]
|
||||
actors: Table[ActorId, Actor]
|
||||
activations: seq[ActivationScript]
|
||||
nextId: Natural
|
||||
|
||||
StopHandler = proc (ds: Dataspace) {.gcsafe.}
|
||||
|
||||
Ground = ref object
|
||||
dataspace: Dataspace
|
||||
stopHandlers: seq[StopHandler]
|
||||
future: Future[void]
|
||||
externalTaskCount: int
|
||||
stepScheduled: bool
|
||||
|
||||
ParentFacet = Option[Facet]
|
||||
|
||||
Facet* = ref FacetObj
|
||||
FacetObj = object
|
||||
id: FacetId
|
||||
actor*: Actor
|
||||
parent: ParentFacet
|
||||
endpoints: Table[EndpointId, Endpoint]
|
||||
stopScripts: seq[Script[void]]
|
||||
children: Table[FacetId, Facet]
|
||||
fields*: Fields
|
||||
isLive, inScript: bool
|
||||
|
||||
# FacetImpl[Fields] = ref FacetImplObj[Fields]
|
||||
# FacetImplObj[Fields] {.final.} = object of FacetBaseObj
|
||||
|
||||
using
|
||||
dataspace: Dataspace
|
||||
actor: Actor
|
||||
facet: Facet
|
||||
|
||||
proc hash*(ep: Endpoint): Hash =
|
||||
!$(hash(ep.id) !& hash(ep.facet.id))
|
||||
|
||||
proc generateId*(ds: Dataspace): Natural =
|
||||
# TODO: used by declareField, but should be hidden.
|
||||
inc(ds.nextId)
|
||||
ds.nextId
|
||||
|
||||
proc newActor(ds: Dataspace; name: string; initialAssertions: Value; parentId: ActorId): Actor =
|
||||
assert(initialAssertions.kind == pkSet)
|
||||
result = Actor(
|
||||
id: ds.generateId.ActorId,
|
||||
name: name,
|
||||
dataspace: ds,
|
||||
parentId: parentId)
|
||||
for v in initialAssertions.set:
|
||||
discard result.adhocAssertions.change(v, 1)
|
||||
ds.actors[result.id] = result
|
||||
|
||||
proc applyPatch(ds: Dataspace; actor: Option[Actor]; changes: Bag) =
|
||||
type Pair = tuple[val: Value; count: int]
|
||||
var removals: seq[Pair]
|
||||
for a, count in changes.pairs:
|
||||
if count > 0:
|
||||
# debugEcho "applyPatch +", a
|
||||
discard ds.index.adjustAssertion(a, count)
|
||||
else:
|
||||
removals.add((a, count))
|
||||
actor.map do (ac: Actor):
|
||||
discard ac.cleanupChanges.change(a, -count)
|
||||
for (a, count) in removals:
|
||||
# debugEcho "applyPatch -", a
|
||||
discard ds.index.adjustAssertion(a, count)
|
||||
|
||||
proc initPatch(): Action =
|
||||
proc impl(patch: Action; ds: Dataspace; actor: Option[Actor]) {.gcsafe.} =
|
||||
ds.applyPatch(actor, patch.changes)
|
||||
Action(impl: impl, kind: patchAction)
|
||||
|
||||
proc pendingPatch(actor): var Action =
|
||||
for a in actor.pendingActions.mitems:
|
||||
if a.kind == patchAction: return a
|
||||
actor.pendingActions.add(initPatch())
|
||||
actor.pendingActions[actor.pendingActions.high]
|
||||
|
||||
proc adjust(patch: var Action; v: Value; delta: int) =
|
||||
discard patch.changes.change(v, delta)
|
||||
|
||||
proc assert(actor; a: Value) = actor.pendingPatch.adjust(a, +1)
|
||||
|
||||
proc retract(actor; a: Value) = actor.pendingPatch.adjust(a, -1)
|
||||
|
||||
proc install(ep: Endpoint; spec: EndpointSpec) =
|
||||
ep.spec = spec
|
||||
if not ep.spec.assertion.isFalse:
|
||||
ep.facet.actor.assert(ep.spec.assertion)
|
||||
ep.spec.analysis.map do (a: Analysis):
|
||||
assert(not ep.spec.callback.isNil)
|
||||
ep.facet.actor.dataspace.index.addHandler(a, ep.spec.callback)
|
||||
|
||||
proc isRunnable(actor): bool =
|
||||
for tasks in actor.pendingTasks:
|
||||
if tasks.len > 0: return true
|
||||
|
||||
proc scheduleTask(actor; prio: Priority; task: Task[void]) =
|
||||
if not actor.isRunnable:
|
||||
actor.dataspace.runnable.add(actor)
|
||||
actor.pendingTasks[prio].addLast(task)
|
||||
|
||||
proc scheduleTask(actor; task: Task[void]) =
|
||||
scheduleTask(actor, pNormal, task)
|
||||
|
||||
proc abandonQueuedWork(actor) =
|
||||
reset actor.pendingActions
|
||||
for q in actor.pendingTasks.mitems: clear(q)
|
||||
|
||||
proc uninstall(ep: Endpoint; emitPatches: bool) =
|
||||
if emitPatches:
|
||||
if not ep.spec.assertion.isFalse:
|
||||
ep.facet.actor.retract(ep.spec.assertion)
|
||||
ep.spec.analysis.map do (a: Analysis):
|
||||
assert(not ep.spec.callback.isNil)
|
||||
ep.facet.actor.dataspace.index.removeHandler(a, ep.spec.callback)
|
||||
|
||||
proc destroy(ep: Endpoint; emitPatches: bool) =
|
||||
ep.facet.actor.dataspace.dataflow.forgetSubject(ep)
|
||||
ep.uninstall(emitPatches)
|
||||
ep.facet.actor.scheduleTask(pGC) do ():
|
||||
ep.facet.endpoints.del(ep.id)
|
||||
# TODO: cannot remove from ep.facet.endpoints during
|
||||
# its iteration, defering remove is probably unecessary
|
||||
# because the facet is going down.
|
||||
|
||||
proc retractAssertionsAndSubscriptions(facet; emitPatches: bool) =
|
||||
facet.actor.scheduleTask do ():
|
||||
for ep in facet.endpoints.values:
|
||||
ep.destroy(emitPatches)
|
||||
clear(facet.endpoints)
|
||||
|
||||
proc abort(facet; emitPatches: bool) =
|
||||
facet.isLive = false
|
||||
for child in facet.children.values:
|
||||
child.abort(emitPatches)
|
||||
facet.retractAssertionsAndSubscriptions(emitPatches)
|
||||
for s in facet.stopScripts: s(facet)
|
||||
# call stopScripts immediately
|
||||
|
||||
proc enqueueScriptAction(actor; action: Action) =
|
||||
actor.pendingActions.add(action)
|
||||
|
||||
proc enqueueScriptAction(facet; action: Action) =
|
||||
enqueueScriptAction(facet.actor, action)
|
||||
|
||||
proc initQuitAction(): Action =
|
||||
proc impl(action: Action; ds: Dataspace; actor: Option[Actor]) =
|
||||
assert(actor.isSome)
|
||||
ds.applyPatch(actor, actor.get.cleanupChanges)
|
||||
ds.actors.del(actor.get.id)
|
||||
Action(impl: impl, kind: quitAction)
|
||||
|
||||
proc terminate(actor; emitPatches: bool) =
|
||||
if emitPatches:
|
||||
actor.scheduleTask do ():
|
||||
for a in actor.adhocAssertions.keys:
|
||||
actor.retract(a)
|
||||
actor.rootFacet.map do (root: Facet):
|
||||
root.abort(emitPatches)
|
||||
actor.scheduleTask do ():
|
||||
actor.enqueueScriptAction(initQuitAction())
|
||||
|
||||
proc invokeScript(facet; script: Script[void]) =
|
||||
try: script(facet)
|
||||
except:
|
||||
let e = getCurrentException()
|
||||
# TODO: install an error handling callback at the facet?
|
||||
facet.actor.abandonQueuedWork()
|
||||
facet.actor.terminate(false)
|
||||
raise e
|
||||
|
||||
func isInert(facet): bool =
|
||||
facet.endpoints.len == 0 and facet.children.len == 0
|
||||
|
||||
proc terminate(facet) =
|
||||
if facet.isLive:
|
||||
let
|
||||
actor = facet.actor
|
||||
parent = facet.parent
|
||||
if parent.isNone:
|
||||
reset actor.rootFacet
|
||||
facet.isLive = false
|
||||
for child in facet.children.values:
|
||||
child.terminate()
|
||||
reset facet.children
|
||||
actor.scheduleTask do ():
|
||||
facet.invokeScript do (facet: Facet):
|
||||
for s in facet.stopScripts:
|
||||
s(facet)
|
||||
|
||||
facet.retractAssertionsAndSubscriptions(true)
|
||||
actor.scheduleTask(pGC) do ():
|
||||
if parent.isSome:
|
||||
if parent.get.isInert:
|
||||
parent.get.terminate()
|
||||
else:
|
||||
actor.terminate(true)
|
||||
|
||||
template withNonScriptContext(facet; body: untyped) =
|
||||
let inScriptPrev = facet.inScript
|
||||
facet.inScript = false
|
||||
try: body
|
||||
finally: facet.inScript = inScriptPrev
|
||||
|
||||
proc ensureFacetSetup(facet; s: string) =
|
||||
assert(not facet.inScript, "Cannot " & s & " ouside facet setup")
|
||||
|
||||
proc ensureNonFacetSetup(facet; s: string) =
|
||||
assert(facet.inScript, "Cannot " & s & " during facet setup")
|
||||
|
||||
proc wrap(facet; script: Script[void]): Task[void] =
|
||||
proc task() = facet.invokeScript(script)
|
||||
task
|
||||
|
||||
proc scheduleScript*(facet; prio: Priority; script: Script[void]) =
|
||||
facet.actor.scheduleTask(prio, facet.wrap(script))
|
||||
|
||||
proc scheduleScript*(facet; script: Script[void]) =
|
||||
facet.actor.scheduleTask(pNormal, facet.wrap(script))
|
||||
|
||||
proc addStartScript*(facet; s: Script[void]) =
|
||||
facet.ensureFacetSetup("onStart")
|
||||
facet.scheduleScript(pNormal, s)
|
||||
|
||||
proc addStopScript*(facet; s: Script[void]) =
|
||||
facet.stopScripts.add(s)
|
||||
|
||||
proc addFacet(actor; parentFacet: Option[Facet]; bootScript: Script[void]; checkInScript = false) =
|
||||
if checkInScript and parentFacet.isSome:
|
||||
assert parentFacet.get.inScript
|
||||
let f = Facet(
|
||||
id: actor.dataspace.generateId.FacetId,
|
||||
actor: actor,
|
||||
parent: parentFacet,
|
||||
isLive: true,
|
||||
inScript: true)
|
||||
if parentFacet.isSome:
|
||||
parentFacet.get.children[f.id] = f
|
||||
f.fields = parentFacet.get.fields
|
||||
# inherit scope by copying fields of the parent
|
||||
else:
|
||||
actor.rootFacet = some f
|
||||
f.invokeScript do (facet: Facet):
|
||||
facet.withNonScriptContext:
|
||||
bootScript(facet)
|
||||
actor.scheduleTask do ():
|
||||
if ((parentFacet.isSome) and (not parentFacet.get.isLive)) or f.isInert:
|
||||
f.terminate()
|
||||
|
||||
proc addChildFacet*(facet; bootProc: Script[void]) =
|
||||
facet.actor.addFacet(some facet, bootProc, true)
|
||||
|
||||
proc deliverMessage(ds: Dataspace; msg: Value; ac: Option[Actor]) =
|
||||
ds.index.deliverMessage(msg)
|
||||
|
||||
proc adhocRetract(actor; a: Value) =
|
||||
if actor.adhocAssertions.change(a, -1, true) == cdPresentToAbsent:
|
||||
actor.retract(a)
|
||||
|
||||
proc refresh(ep: Endpoint) =
|
||||
let newSpec = ep.updateProc(ep.facet)
|
||||
if newSpec.assertion != ep.spec.assertion:
|
||||
ep.uninstall(true)
|
||||
ep.install(newSpec)
|
||||
|
||||
proc refreshAssertions(ds: Dataspace) =
|
||||
ds.dataflow.repairDamage do (ep: Endpoint):
|
||||
let facet = ep.facet
|
||||
assert(facet.isLive)
|
||||
facet.invokeScript do (f: Facet):
|
||||
f.withNonScriptContext:
|
||||
refresh(ep)
|
||||
|
||||
proc addActor(ds: Dataspace; name: string; bootProc: Script[void]; initialAssertions: Value; parent: Option[Actor]) =
|
||||
var parentId: ActorId
|
||||
parent.map do (p: Actor): parentId = p.id
|
||||
let ac = newActor(ds, name, initialAssertions, parentId)
|
||||
ds.applyPatch(some ac, ac.adhocAssertions)
|
||||
ac.addFacet(none Facet) do (systemFacet: Facet):
|
||||
# Root facet is a dummy "system" facet that exists to hold
|
||||
# one-or-more "user" "root" facets.
|
||||
ac.addFacet(some systemFacet, bootProc)
|
||||
# ^ The "true root", user-visible facet.
|
||||
for a in initialAssertions.set:
|
||||
ac.adhocRetract(a)
|
||||
|
||||
proc send*(facet; body: Value) =
|
||||
## Send a message into the dataspace.
|
||||
facet.ensureNonFacetSetup("send")
|
||||
proc impl(_: Action; ds: Dataspace; actor: Option[Actor]) =
|
||||
ds.deliverMessage(body, actor)
|
||||
facet.enqueueScriptAction(Action(impl: impl, kind: messageAction))
|
||||
|
||||
proc initSpawnAction(name: string; bootProc: Script[void], initialAssertions: Value): Action =
|
||||
proc impl(action: Action; ds: Dataspace; actor: Option[Actor]) =
|
||||
ds.addActor(name, bootProc, initialAssertions, actor)
|
||||
Action(impl: impl, kind: spawnAction)
|
||||
|
||||
proc spawn*(facet; name: string; bootProc: Script[void], initialAssertions: Value) =
|
||||
facet.ensureNonFacetSetup("spawn")
|
||||
facet.enqueueScriptAction(initSpawnAction(name, bootProc, initialAssertions))
|
||||
|
||||
proc spawn*(facet; name: string; bootProc: Script[void]) =
|
||||
spawn(facet, name, bootProc, Value(kind: pkSet))
|
||||
|
||||
#[
|
||||
template spawn*(facet; name: string; fields: untyped; bootProc: Script[void]): untyped =
|
||||
type Fields = typeof(fields)
|
||||
spawn[Fields](facet, name, bootProc, Value(kind: pkSet))
|
||||
]#
|
||||
|
||||
proc initActivationAction(script: ActivationScript; name: string): Action =
|
||||
proc impl(action: Action; ds: Dataspace; actor: Option[Actor]) =
|
||||
for s in ds.activations:
|
||||
if s == script: return
|
||||
ds.activations.add(script)
|
||||
proc boot(root: Facet) =
|
||||
root.addStartScript(script)
|
||||
ds.addActor(name, boot, Value(kind: pkSet), actor)
|
||||
Action(impl: impl, kind: activationAction)
|
||||
|
||||
proc activate(facet; name: string; script: ActivationScript) =
|
||||
facet.ensureNonFacetSetup "`activate`"
|
||||
facet.enqueueScriptAction(initActivationAction(script, name))
|
||||
|
||||
proc newDataspace(ground: Ground; name: string; bootProc: ActivationScript): Dataspace =
|
||||
let turn = Turn(actions: @[initSpawnAction(name, bootProc, Value(kind: pkSet))])
|
||||
Dataspace(ground: ground, index: initIndex(), pendingTurns: @[turn])
|
||||
|
||||
proc addEndpoint*(facet; updateScript: Script[EndpointSpec], isDynamic = true) =
|
||||
facet.ensureFacetSetup("addEndpoint")
|
||||
let
|
||||
actor = facet.actor
|
||||
dataspace = actor.dataspace
|
||||
ep = Endpoint(
|
||||
id: dataspace.generateId.EndpointId,
|
||||
facet: facet,
|
||||
updateProc: updateScript)
|
||||
dataspace.dataflow.addSubject(ep)
|
||||
let
|
||||
dyn = if isDynamic: some ep else: none Endpoint
|
||||
initialSpec = dataspace.dataflow.withSubject(dyn) do () -> EndpointSpec:
|
||||
updateScript(facet)
|
||||
assert:
|
||||
(initialSpec.analysis.isNone and initialSpec.callback.isNil) or
|
||||
(initialSpec.analysis.isSome and (not initialSpec.callback.isNil))
|
||||
ep.install(initialSpec)
|
||||
facet.endpoints[ep.id] = ep
|
||||
|
||||
proc addDataflow*(facet; prio: Priority; subjectProc: Script[void]) =
|
||||
facet.addEndpoint do (fa: Facet) -> EndpointSpec:
|
||||
let subjectId = facet.actor.dataspace.dataflow.currentSubjectId
|
||||
facet.scheduleScript(prio) do (fa: Facet):
|
||||
if facet.isLive:
|
||||
facet.actor.dataspace.dataflow.withSubject(subjectId):
|
||||
subjectProc(facet)
|
||||
|
||||
proc addDataflow*(facet; subjectProc: Script[void]) =
|
||||
addDataflow(facet, pNormal, subjectProc)
|
||||
|
||||
proc commitActions(dataspace; actor; pending: seq[Action]) =
|
||||
dataspace.pendingTurns.add(Turn(actor: some actor, actions: pending))
|
||||
|
||||
proc runPendingTask(actor): bool =
|
||||
for deque in actor.pendingTasks.mitems:
|
||||
if deque.len > 0:
|
||||
let task = deque.popFirst()
|
||||
task()
|
||||
actor.dataspace.refreshAssertions()
|
||||
return true
|
||||
|
||||
proc runPendingTasks(actor) =
|
||||
while actor.runPendingTask(): discard
|
||||
if actor.pendingActions.len > 0:
|
||||
var pending = move actor.pendingActions
|
||||
actor.dataspace.commitActions(actor, pending)
|
||||
|
||||
proc runPendingTasks(ds: Dataspace) =
|
||||
var runnable = move ds.runnable
|
||||
for actor in runnable:
|
||||
runPendingTasks(actor)
|
||||
|
||||
proc performPendingActions(ds: Dataspace) =
|
||||
var turns = move ds.pendingTurns
|
||||
for turn in turns:
|
||||
for action in turn.actions:
|
||||
action.impl(action, ds, turn.actor)
|
||||
runPendingTasks(ds)
|
||||
|
||||
proc runTasks(ds: Dataspace): bool =
|
||||
ds.runPendingTasks()
|
||||
ds.performPendingActions()
|
||||
result = ds.runnable.len > 0 or ds.pendingTurns.len > 0
|
||||
|
||||
proc stop*(facet; continuation: Script[void] = nil) =
|
||||
facet.parent.map do (parent: Facet):
|
||||
parent.invokeScript do (_: Facet):
|
||||
facet.actor.scheduleTask do ():
|
||||
facet.terminate()
|
||||
if not continuation.isNil:
|
||||
parent.scheduleScript do (parent: Facet):
|
||||
continuation(parent)
|
||||
# ^ TODO: is this the correct scope to use??
|
||||
|
||||
proc addStopHandler*(g: Ground; h: StopHandler) =
|
||||
g.stopHandlers.add(h)
|
||||
|
||||
proc step(g: Ground) {.gcsafe.}
|
||||
|
||||
proc scheduleStep(g: Ground) =
|
||||
if not g.stepScheduled:
|
||||
g.stepScheduled = true
|
||||
asyncdispatch.callSoon: step(g)
|
||||
|
||||
proc beginExternalTask*(facet) =
|
||||
## Inform the ``Ground`` dataspace of a pending external task.
|
||||
## The dataspace will continue to operate until all internal
|
||||
## and external tasks have completed. See ``endExternalTask``.
|
||||
inc facet.actor.dataspace.ground.externalTaskCount
|
||||
|
||||
proc endExternalTask*(facet) =
|
||||
## Inform the ``Ground`` dataspace that an external task has completed.
|
||||
# TODO: automatically do this when the facet stops?
|
||||
let g = facet.actor.dataspace.ground
|
||||
dec g.externalTaskCount
|
||||
scheduleStep g
|
||||
|
||||
proc step(g: Ground) =
|
||||
# TODO: backgroundtasks
|
||||
g.stepScheduled = false
|
||||
if g.dataspace.runTasks():
|
||||
scheduleStep g
|
||||
else:
|
||||
if g.externalTaskCount < 1:
|
||||
for actor in g.dataspace.actors.values:
|
||||
terminate(actor, false)
|
||||
for sh in g.stopHandlers:
|
||||
sh(g.dataspace)
|
||||
reset g.stopHandlers
|
||||
complete(g.future)
|
||||
|
||||
proc bootModule*(name: string; bootProc: ActivationScript): Future[void] =
|
||||
# TODO: better integration with the async dispatcher
|
||||
let g = Ground(future: newFuture[void]"bootModule")
|
||||
g.dataspace = newDataspace(g, name) do (rootFacet: Facet):
|
||||
rootFacet.addStartScript do (rootFacet: Facet):
|
||||
rootFacet.activate(name, bootProc)
|
||||
addTimer(1, true) do (fd: AsyncFD) -> bool:
|
||||
step(g)
|
||||
true
|
||||
return g.future
|
||||
|
||||
template declareField*(facet: Facet; F: untyped; T: typedesc; initial: T): untyped =
|
||||
## Declare getter and setter procs for field `F` of type `T` initalized with `initial`.
|
||||
type DistinctField {.final, unpreservable.} = object of Field
|
||||
discard
|
||||
let `F` {.inject.} = DistinctField(id: facet.actor.dataspace.generateId.FieldId)
|
||||
facet.actor.dataspace.dataflow.defineObservableProperty(`F`.id)
|
||||
facet.fields.add(toPreserve(initial))
|
||||
let fieldOff = facet.fields.high
|
||||
proc set(f: DistinctField; x: T) {.used.} =
|
||||
facet.actor.dataspace.dataflow.recordDamage(f.id)
|
||||
facet.fields[fieldOff] = toPreserve(x)
|
||||
proc set(f: DistinctField; x: Value) {.used.} =
|
||||
facet.actor.dataspace.dataflow.recordDamage(f.id)
|
||||
facet.fields[fieldOff] = x
|
||||
proc get(f: DistinctField): T {.used.} =
|
||||
facet.actor.dataspace.dataflow.recordObservation(f.id)
|
||||
if not fromPreserve(result, facet.fields[fieldOff]):
|
||||
raise newException(ValueError, "cannot convert field " & $F & " to " & $T)
|
||||
proc getPreserve(f: DistinctField): Value {.used.} =
|
||||
facet.actor.dataspace.dataflow.recordObservation(f.id)
|
||||
facet.fields[fieldOff]
|
||||
|
||||
template stopIf*(facet: Facet; cond: untyped; continuation: Script[void]): untyped =
|
||||
## Stop the current facet if `cond` is true and
|
||||
## invoke `body` after the facet has stopped.
|
||||
discard facet.addDataflow do (facet: Facet):
|
||||
if cond: facet.stop(continuation)
|
||||
|
||||
type EventHandler* = proc (facet: Facet; bindings: seq[Value]) {.gcsafe.}
|
||||
|
||||
proc wrap*(facet: Facet; onEvent: EventKind; cb: EventHandler): HandlerCallback =
|
||||
proc wrapper(event: EventKind; bindings: seq[Value]) =
|
||||
facet.invokeScript do (facet: Facet):
|
||||
if event == onEvent:
|
||||
facet.scheduleScript do (facet: Facet):
|
||||
cb(facet, bindings)
|
||||
wrapper
|
||||
handleMap: Table[Handle, Assertion]
|
||||
|
||||
method publish(ds: Dataspace; turn: Turn; a: AssertionRef; h: Handle) =
|
||||
if add(ds.index, turn, a.value):
|
||||
var obs = a.value.preservesTo(Observe)
|
||||
if obs.isSome and obs.get.observer of Cap:
|
||||
ds.index.add(turn, obs.get.pattern, Cap(obs.get.observer))
|
||||
ds.handleMap[h] = a.value
|
||||
|
||||
method retract(ds: Dataspace; turn: Turn; h: Handle) =
|
||||
let v = ds.handleMap[h]
|
||||
if remove(ds.index, turn, v):
|
||||
ds.handleMap.del h
|
||||
var obs = v.preservesTo(Observe)
|
||||
if obs.isSome and obs.get.observer of Cap:
|
||||
ds.index.remove(turn, obs.get.pattern, Cap(obs.get.observer))
|
||||
|
||||
method message(ds: Dataspace; turn: Turn; a: AssertionRef) =
|
||||
ds.index.deliverMessage(turn, a.value)
|
||||
|
||||
proc newDataspace*(turn: Turn): Cap =
|
||||
newCap(turn, Dataspace(index: initIndex()))
|
||||
|
||||
type BootProc = proc (turn: Turn; ds: Cap) {.closure.}
|
||||
type DeprecatedBootProc = proc (ds: Cap; turn: Turn) {.closure.}
|
||||
|
||||
proc bootDataspace*(name: string; bootProc: BootProc): Actor =
|
||||
bootActor(name) do (turn: Turn):
|
||||
turn.preventInertCheck()
|
||||
bootProc(turn, newDataspace(turn))
|
||||
|
||||
proc bootDataspace*(name: string; bootProc: DeprecatedBootProc): Actor {.deprecated.} =
|
||||
bootDataspace(name) do (turn: Turn, ds: Cap):
|
||||
bootProc(ds, turn)
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
include_rules
|
||||
NIM_FLAGS += --path:$(TUP_CWD)/../..
|
||||
: foreach *.nim |> !nim_check |>
|
|
@ -0,0 +1,343 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[httpcore, options, parseutils, sets, streams, strutils, tables, times, uri]
|
||||
import preserves, ../../syndicate, ../bags, ./timers
|
||||
import ../protocols/http
|
||||
import taps
|
||||
|
||||
const
|
||||
CRLF = "\x0d\x0a"
|
||||
SP = { ' ', '\x09', '\x0b', '\x0c', '\x0d' }
|
||||
SupportedVersion = "HTTP/1.1"
|
||||
IMF = initTimeFormat"ddd, dd MMM yyyy HH:mm:ss"
|
||||
|
||||
when defined(posix):
|
||||
proc echo(args: varargs[string, `$`]) {.used.} =
|
||||
stderr.writeLine(args)
|
||||
|
||||
proc badRequest(conn: Connection; msg: string) =
|
||||
conn.send(SupportedVersion & " 400 " & msg, endOfMessage = true)
|
||||
|
||||
proc extractQuery(s: var string): Table[Symbol, seq[QueryValue]] =
|
||||
let start = succ skipUntil(s, '?')
|
||||
if start < s.len:
|
||||
var query = s[start..s.high]
|
||||
s.setLen(pred start)
|
||||
for key, val in uri.decodeQuery(query):
|
||||
var list = result.getOrDefault(Symbol key)
|
||||
list.add QueryValue(orKind: QueryValueKind.string, string: val)
|
||||
result[Symbol key] = list
|
||||
|
||||
proc parseRequest(conn: Connection; text: string): (int, HttpRequest) =
|
||||
## Parse an `HttpRequest` request out of a `text` from a `Connection`.
|
||||
result[1].host = RequestHost(orKind: RequestHostKind.absent)
|
||||
result[1].body = RequestBody(orKind: RequestBodyKind.absent)
|
||||
|
||||
var
|
||||
token: string
|
||||
off: int
|
||||
|
||||
template advanceSp =
|
||||
let n = skipWhile(text, SP, off)
|
||||
if n < 1:
|
||||
badRequest(conn, "invalid request")
|
||||
return
|
||||
inc(off, n)
|
||||
|
||||
# method
|
||||
off.inc parseUntil(text, token, SP, off)
|
||||
result[1].method = token.toLowerAscii.Symbol
|
||||
advanceSp()
|
||||
|
||||
# target
|
||||
if text[off] == '/': inc(off) #TODO: always a leading slash?
|
||||
off.inc parseUntil(text, token, SP, off)
|
||||
advanceSp()
|
||||
|
||||
block:
|
||||
var version: string
|
||||
off.inc parseUntil(text, version, SP, off)
|
||||
advanceSp()
|
||||
if version != SupportedVersion:
|
||||
badRequest(conn, "version not supported")
|
||||
return
|
||||
|
||||
result[1].query = extractQuery(token)
|
||||
|
||||
if token != "":
|
||||
result[1].path = split(token, '/')
|
||||
for p in result[1].path.mitems:
|
||||
# normalize the path
|
||||
for i, c in p:
|
||||
if c in {'A'..'Z'}:
|
||||
p[i] = char c.ord + 0x20
|
||||
|
||||
template advanceLine =
|
||||
inc off, skipWhile(text, {'\x0d'}, off)
|
||||
if text.high < off or text[off] != '\x0a':
|
||||
badRequest(conn, "invalid request")
|
||||
return
|
||||
inc off, 1
|
||||
|
||||
advanceLine()
|
||||
while off < text.len:
|
||||
off.inc parseUntil(text, token, {'\x0d', '\x0a'}, off)
|
||||
if token == "": break
|
||||
advanceLine()
|
||||
var
|
||||
(key, vals) = httpcore.parseHeader(token)
|
||||
k = key.toLowerAscii.Symbol
|
||||
v = result[1].headers.getOrDefault(k)
|
||||
for e in vals.mitems:
|
||||
e = e.toLowerAscii
|
||||
if k == Symbol"host":
|
||||
result[1].host = RequestHost(orKind: RequestHostKind.`present`, present: e)
|
||||
if v == "": v = move e
|
||||
else:
|
||||
v.add ", "
|
||||
v.add e
|
||||
if k == Symbol"host":
|
||||
result[1].host = RequestHost(orKind: RequestHostKind.`present`, present: v)
|
||||
result[1].headers[k] = v
|
||||
|
||||
result[0] = off
|
||||
|
||||
proc len(chunk: Chunk): int =
|
||||
case chunk.orKind
|
||||
of ChunkKind.string: chunk.string.len
|
||||
of ChunkKind.bytes: chunk.bytes.len
|
||||
|
||||
proc lenLine(chunk: Chunk): string =
|
||||
result = chunk.len.toHex.strip(true, false, {'0'})
|
||||
result.add CRLF
|
||||
|
||||
type
|
||||
Driver = ref object
|
||||
facet: Facet
|
||||
ds, timers: Cap
|
||||
bindings: Bag[Value]
|
||||
# cannot make a bag of HttpBinding, no `==` operator
|
||||
sequenceNumber: BiggestInt
|
||||
Session = ref object
|
||||
facet: Facet
|
||||
driver: Driver
|
||||
conn: Connection
|
||||
port: Port
|
||||
Exchange = ref object of Entity
|
||||
ses: Session
|
||||
req: HttpRequest
|
||||
stream: StringStream
|
||||
mode: HttpResponseKind
|
||||
active: bool
|
||||
|
||||
proc send[T: byte|char](ses: Session; data: openarray[T]) =
|
||||
ses.conn.send(addr data[0], data.len, endOfMessage = false)
|
||||
|
||||
proc send(ses: Session; chunk: Chunk) =
|
||||
case chunk.orKind
|
||||
of ChunkKind.string:
|
||||
ses.send(chunk.string)
|
||||
of ChunkKind.bytes:
|
||||
ses.send(chunk.bytes)
|
||||
|
||||
func `==`(s: string; rh: RequestHost): bool =
|
||||
rh.orKind == RequestHostKind.present and rh.present == s
|
||||
|
||||
proc match(b: HttpBinding, r: HttpRequest): bool =
|
||||
## Check if `HttpBinding` `b` matches `HttpRequest` `r`.
|
||||
result =
|
||||
(b.host.orKind == HostPatternKind.any or
|
||||
b.host.host == r.host) and
|
||||
(b.port == r.port) and
|
||||
(b.method.orKind == MethodPatternKind.any or
|
||||
b.method.specific == r.method)
|
||||
if result:
|
||||
for i, p in b.path:
|
||||
if i > r.path.high: return false
|
||||
case p.orKind
|
||||
of PathPatternElementKind.wildcard: discard
|
||||
of PathPatternElementKind.label:
|
||||
if p.label != r.path[i]: return false
|
||||
of PathPatternElementKind.rest:
|
||||
return i == b.path.high
|
||||
# return false if ... isn't the last element
|
||||
|
||||
proc strongerThan(a, b: HttpBinding): bool =
|
||||
## Check if `a` is a stronger `HttpBinding` than `b`.
|
||||
result =
|
||||
(a.host.orKind != b.host.orKind and
|
||||
a.host.orKind == HostPatternKind.host) or
|
||||
(a.method.orKind != b.method.orKind and
|
||||
a.method.orKind == MethodPatternKind.specific)
|
||||
if not result:
|
||||
if a.path.len > b.path.len: return true
|
||||
for i in b.path.low..a.path.high:
|
||||
if a.path[i].orKind != b.path[i].orKind and
|
||||
a.path[i].orKind == PathPatternElementKind.label:
|
||||
return true
|
||||
|
||||
proc match(driver: Driver; req: HttpRequest): Option[HttpBinding] =
|
||||
var b: HttpBinding
|
||||
for p in driver.bindings:
|
||||
if b.fromPreserves(p) and b.match req:
|
||||
if result.isNone or b.strongerThan(result.get):
|
||||
result = some b
|
||||
|
||||
method message(e: Exchange; turn: Turn; a: AssertionRef) =
|
||||
# Send responses back into a connection.
|
||||
var res: HttpResponse
|
||||
if e.mode != HttpResponseKind.done and res.fromPreserves a.value:
|
||||
case res.orKind
|
||||
|
||||
of HttpResponseKind.status:
|
||||
if e.mode == res.orKind:
|
||||
e.active = true
|
||||
e.ses.conn.startBatch()
|
||||
e.stream.write(
|
||||
SupportedVersion, " ", res.status.code, " ", res.status.message, CRLF,
|
||||
"date: ", now().format(IMF), CRLF)
|
||||
# add Date header automatically - RFC 9110 Section 6.6.1.
|
||||
e.mode = HttpResponseKind.header
|
||||
|
||||
of HttpResponseKind.header:
|
||||
if e.mode == res.orKind:
|
||||
e.stream.write(res.header.name, ": ", res.header.value, CRLF)
|
||||
|
||||
of HttpResponseKind.chunk:
|
||||
if res.chunk.chunk.len > 0:
|
||||
if e.mode == HttpResponseKind.header:
|
||||
e.stream.write("transfer-encoding: chunked" & CRLF & CRLF)
|
||||
e.ses.send(move e.stream.data)
|
||||
e.mode = res.orKind
|
||||
if e.mode == res.orKind:
|
||||
e.ses.send(res.chunk.chunk.lenLine)
|
||||
e.ses.send(res.chunk.chunk)
|
||||
e.ses.send(CRLF)
|
||||
|
||||
of HttpResponseKind.done:
|
||||
if e.mode in {HttpResponseKind.header, HttpResponseKind.chunk}:
|
||||
if e.mode == HttpResponseKind.header:
|
||||
e.stream.write("content-length: ", $res.done.chunk.len & CRLF & CRLF)
|
||||
e.ses.send(move e.stream.data)
|
||||
if res.done.chunk.len > 0:
|
||||
e.ses.send(res.done.chunk)
|
||||
elif e.mode == HttpResponseKind.chunk:
|
||||
e.ses.send(res.done.chunk.lenLine)
|
||||
if res.done.chunk.len > 0:
|
||||
e.ses.send(res.done.chunk)
|
||||
e.ses.send(CRLF & "0" & CRLF & CRLF)
|
||||
e.mode = res.orKind
|
||||
e.ses.conn.endBatch()
|
||||
if e.req.headers.getOrDefault(Symbol"connection") == "close":
|
||||
e.ses.conn.close()
|
||||
stop(turn)
|
||||
# stop the facet scoped to the exchange
|
||||
# so that the response capability is withdrawn
|
||||
|
||||
proc service(turn: Turn; exch: Exchange) =
|
||||
## Service an HTTP message exchange.
|
||||
var binding = exch.ses.driver.match exch.req
|
||||
if binding.isNone:
|
||||
stop(turn)
|
||||
else:
|
||||
var handler = binding.get.handler.unembed Cap
|
||||
if handler.isNone:
|
||||
stop(turn)
|
||||
else:
|
||||
let cap = newCap(turn, exch)
|
||||
publish(turn, handler.get, HttpContext(
|
||||
req: exch.req,
|
||||
res: embed cap,
|
||||
))
|
||||
const timeout = initDuration(seconds = 4)
|
||||
after(turn, exch.ses.driver.timers, timeout) do (turn: Turn):
|
||||
if not exch.active:
|
||||
var res = HttpResponse(orKind: HttpResponseKind.status)
|
||||
res.status.code = 504
|
||||
res.status.message = "Binding timeout"
|
||||
message(turn, cap, res)
|
||||
res = HttpResponse(orKind: HttpResponseKind.done)
|
||||
message(turn, cap, res)
|
||||
|
||||
proc service(ses: Session) =
|
||||
## Service a connection to an HTTP client.
|
||||
ses.facet.onStop do (turn: Turn):
|
||||
close ses.conn
|
||||
ses.conn.onClosed do ():
|
||||
stop ses.facet
|
||||
ses.conn.onReceivedPartial do (data: seq[byte]; ctx: MessageContext; eom: bool):
|
||||
ses.facet.run do (turn: Turn):
|
||||
var (n, req) = parseRequest(ses.conn, cast[string](data))
|
||||
if n > 0:
|
||||
inc(ses.driver.sequenceNumber)
|
||||
req.sequenceNumber = ses.driver.sequenceNumber
|
||||
req.port = BiggestInt ses.port
|
||||
inFacet(turn) do (turn: Turn):
|
||||
preventInertCheck(turn)
|
||||
# start a new facet for this message exchange
|
||||
turn.service Exchange(
|
||||
facet: turn.facet,
|
||||
ses: ses,
|
||||
req: req,
|
||||
stream: newStringStream(),
|
||||
mode: HttpResponseKind.status
|
||||
)
|
||||
ses.conn.receive()
|
||||
ses.conn.receive()
|
||||
|
||||
proc newListener(port: Port): Listener =
|
||||
var lp = newLocalEndpoint()
|
||||
lp.with port
|
||||
listen newPreconnection(local=[lp])
|
||||
|
||||
proc httpListen(turn: Turn; driver: Driver; port: Port): Listener =
|
||||
let facet = turn.facet
|
||||
var listener = newListener(port)
|
||||
preventInertCheck(turn)
|
||||
listener.onListenError do (err: ref Exception):
|
||||
terminateFacet(facet, err)
|
||||
facet.onStop do (turn: Turn):
|
||||
stop listener
|
||||
listener.onConnectionReceived do (conn: Connection):
|
||||
driver.facet.run do (turn: Turn):
|
||||
# start a new turn
|
||||
linkActor(turn, "http-conn") do (turn: Turn):
|
||||
preventInertCheck(turn)
|
||||
let facet = turn.facet
|
||||
conn.onConnectionError do (err: ref Exception):
|
||||
terminateFacet(facet, err)
|
||||
# terminate this actor on exception
|
||||
# facet is scoped to the lifetime of the connection
|
||||
service Session(
|
||||
facet: turn.facet,
|
||||
driver: driver,
|
||||
conn: conn,
|
||||
port: port,
|
||||
)
|
||||
listener
|
||||
|
||||
proc httpDriver(turn: Turn; ds: Cap) =
|
||||
let driver = Driver(facet: turn.facet, ds: ds, timers: turn.newDataspace)
|
||||
spawnTimerDriver(turn, driver.timers)
|
||||
|
||||
during(turn, ds, HttpBinding?:{
|
||||
1: grab(),
|
||||
}) do (port: BiggestInt):
|
||||
publish(turn, ds, HttpListener(port: port))
|
||||
|
||||
during(turn, ds, ?:HttpBinding) do (
|
||||
ho: HostPattern, po: int, me: MethodPattern, pa: PathPattern, e: Value):
|
||||
let b = HttpBinding(host: ho, port: po, `method`: me, path: pa, handler: e)
|
||||
discard driver.bindings.change(b.toPreserves, +1)
|
||||
do:
|
||||
discard driver.bindings.change(b.toPreserves, -1)
|
||||
|
||||
during(turn, ds, ?:HttpListener) do (port: uint16):
|
||||
let l = httpListen(turn, driver, Port port)
|
||||
do:
|
||||
stop(l)
|
||||
|
||||
proc spawnHttpDriver*(turn: Turn; ds: Cap): Actor {.discardable.} =
|
||||
spawnActor(turn, "http-driver") do (turn: Turn):
|
||||
httpDriver(turn, ds)
|
|
@ -1,36 +1,148 @@
|
|||
# SPDX-FileCopyrightText: 2021 ☭ Emery Hemingway
|
||||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[asyncdispatch, monotimes, times]
|
||||
import preserves, preserves/records
|
||||
import syndicate, syndicate/assertions
|
||||
import std/[tables, times]
|
||||
import preserves
|
||||
import ../../syndicate, ../protocols/[timer, dataspace]
|
||||
|
||||
type TimeLaterThan* {.record: "TimeLaterThan".} = object
|
||||
`deadline`*: Monotime
|
||||
when defined(solo5):
|
||||
import solo5_dispatcher
|
||||
else:
|
||||
import pkg/sys/[handles, ioqueue]
|
||||
|
||||
proc prsTimeLaterThan*(deadline: Preserve | Monotime): Preserve =
|
||||
initRecord(symbol("TimeLaterThan"), deadline)
|
||||
export timer
|
||||
|
||||
proc toPreserveHook*(time: Monotime): Preserve =
|
||||
time.ticks.toPreserve
|
||||
type
|
||||
Observe = dataspace.Observe
|
||||
|
||||
proc fromPreserveHook*(mt: var Monotime; p: Preserve): bool =
|
||||
if p.kind == pkSignedInteger:
|
||||
mt = cast[MonoTime]((p.int.int64,))
|
||||
result = true
|
||||
when defined(solo5):
|
||||
import solo5, solo5_dispatcher
|
||||
|
||||
syndicate timerDriver:
|
||||
proc wallFloat: float =
|
||||
solo5_clock_wall().float / 1_000_000_000.0
|
||||
|
||||
spawn "timer":
|
||||
during(observe(prsTimeLaterThan(?deadline))) do (deadline: MonoTime):
|
||||
let
|
||||
now = getMonoTime()
|
||||
period = inMilliseconds(deadline - now)
|
||||
if period > 0:
|
||||
getCurrentFacet().beginExternalTask()
|
||||
addTimer(period.int, oneshot = true) do (fd: AsyncFD) -> bool:
|
||||
react: publish: prsTimeLaterThan(deadline)
|
||||
getCurrentFacet().endExternalTask()
|
||||
true
|
||||
else:
|
||||
react: publish: prsTimeLaterThan(deadline)
|
||||
type
|
||||
TimerDriver = ref object
|
||||
facet: Facet
|
||||
## Owning facet of driver.
|
||||
target: Cap
|
||||
## Destination for LaterThan assertions.
|
||||
deadlines: Table[float, Facet]
|
||||
## Deadlines that other actors are observing.
|
||||
|
||||
proc spawnTimerDriver(facet: Facet; cap: Cap): TimerDriver =
|
||||
TimerDriver(facet: facet, target: cap)
|
||||
|
||||
proc await(driver: TimerDriver; deadline: float) {.solo5dispatch.} =
|
||||
yieldUntil(deadline)
|
||||
let facet = driver.deadlines.getOrDefault(deadline)
|
||||
if not facet.isNil:
|
||||
# check if the deadline is still observed
|
||||
proc turnWork(turn: Turn) =
|
||||
discard publish(turn, driver.target, LaterThan(seconds: deadline))
|
||||
run(facet, turnWork)
|
||||
|
||||
else:
|
||||
import std/[oserrors, posix, sets]
|
||||
type Time = posix.Time
|
||||
|
||||
{.pragma: timerfd, importc, header: "<sys/timerfd.h>".}
|
||||
|
||||
proc timerfd_create(clock_id: ClockId, flags: cint): cint {.timerfd.}
|
||||
proc timerfd_settime(ufd: cint, flags: cint,
|
||||
utmr: var Itimerspec, otmr: var Itimerspec): cint {.timerfd.}
|
||||
proc timerfd_gettime(ufd: cint, curr: var Itimerspec): cint {.timerfd.}
|
||||
|
||||
var
|
||||
TFD_NONBLOCK {.timerfd.}: cint
|
||||
TFD_CLOEXEC {.timerfd.}: cint
|
||||
TFD_TIMER_ABSTIME {.timerfd.}: cint
|
||||
|
||||
proc `<`(a, b: Timespec): bool =
|
||||
a.tv_sec.clong < b.tv_sec.clong or
|
||||
(a.tv_sec.clong == b.tv_sec.clong and a.tv_nsec < b.tv_nsec)
|
||||
|
||||
proc `+`(a, b: Timespec): Timespec =
|
||||
result.tv_sec = Time a.tv_sec.clong + b.tv_sec.clong
|
||||
result.tv_nsec = a.tv_nsec + b.tv_nsec
|
||||
|
||||
func toFloat(ts: Timespec): float =
|
||||
ts.tv_sec.float + ts.tv_nsec.float / 1_000_000_000
|
||||
|
||||
func toTimespec(f: float): Timespec =
|
||||
result.tv_sec = Time(f)
|
||||
result.tv_nsec = clong(uint64(f * 1_000_000_000) mod 1_000_000_000)
|
||||
|
||||
proc wallFloat: float =
|
||||
var ts: Timespec
|
||||
if clock_gettime(CLOCK_REALTIME, ts) < 0:
|
||||
raiseOSError(osLastError(), "clock_gettime")
|
||||
ts.toFloat
|
||||
|
||||
type
|
||||
TimerDriver = ref object
|
||||
facet: Facet
|
||||
## Owning facet of driver.
|
||||
target: Cap
|
||||
## Destination for LaterThan assertions.
|
||||
deadlines: Table[float, Facet]
|
||||
## Deadlines that other actors are observing.
|
||||
timers: HashSet[cint]
|
||||
# TODO: use a single timer descriptor
|
||||
|
||||
proc spawnTimerDriver(facet: Facet; cap: Cap): TimerDriver =
|
||||
let driver = TimerDriver(facet: facet, target: cap)
|
||||
facet.onStop do (turn: Turn):
|
||||
for fd in driver.timers:
|
||||
unregister(FD fd)
|
||||
discard close(fd)
|
||||
driver
|
||||
|
||||
proc earliestFloat(driver: TimerDriver): float =
|
||||
assert driver.deadlines.len > 0
|
||||
result = high float
|
||||
for deadline in driver.deadlines.keys:
|
||||
if deadline < result:
|
||||
result = deadline
|
||||
|
||||
proc await(driver: TimerDriver; deadline: float) {.asyncio.} =
|
||||
## Run timer driver concurrently with actor.
|
||||
let fd = timerfd_create(CLOCK_REALTIME, TFD_NONBLOCK or TFD_CLOEXEC)
|
||||
if fd < 0:
|
||||
raiseOSError(osLastError(), "failed to acquire timer descriptor")
|
||||
var
|
||||
old: Itimerspec
|
||||
its = Itimerspec(it_value: deadline.toTimespec)
|
||||
if timerfd_settime(fd, TFD_TIMER_ABSTIME, its, old) < 0:
|
||||
raiseOSError(osLastError(), "failed to set timeout")
|
||||
driver.timers.incl(fd)
|
||||
while wallFloat() < deadline:
|
||||
# Check if the timer is expired which
|
||||
# could happen before waiting.
|
||||
wait(FD fd, Read)
|
||||
let facet = driver.deadlines.getOrDefault(deadline)
|
||||
if not facet.isNil:
|
||||
# Check if the deadline is still observed.
|
||||
proc turnWork(turn: Turn) =
|
||||
discard publish(turn, driver.target, LaterThan(seconds: deadline))
|
||||
run(facet, turnWork)
|
||||
discard close(fd)
|
||||
driver.timers.excl(fd)
|
||||
|
||||
proc spawnTimerDriver*(turn: Turn; ds: Cap): Actor {.discardable.} =
|
||||
## Spawn a timer actor that responds to
|
||||
## dataspace observations of timeouts on `ds`.
|
||||
linkActor(turn, "timers") do (turn: Turn):
|
||||
let driver = spawnTimerDriver(turn.facet, ds)
|
||||
let pat = observePattern(!LaterThan, {@[0.toPreserves]: grabLit()})
|
||||
during(turn, ds, pat) do (deadline: float):
|
||||
driver.deadlines[deadline] = turn.facet
|
||||
discard trampoline(whelp await(driver, deadline))
|
||||
do:
|
||||
driver.deadlines.del deadline
|
||||
|
||||
proc after*(turn: Turn; ds: Cap; dur: Duration; act: TurnAction) =
|
||||
## Execute `act` after some duration of time.
|
||||
var later = wallFloat() + dur.inMilliseconds.float / 1_000.0
|
||||
onPublish(turn, ds, ?LaterThan(seconds: later)):
|
||||
act(turn)
|
||||
|
|
|
@ -0,0 +1,49 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[hashes, tables]
|
||||
import preserves
|
||||
import ./actors, ./patterns, ./protocols/dataspace
|
||||
|
||||
type
|
||||
DuringProc* = proc (turn: Turn; a: Value; h: Handle): TurnAction
|
||||
DuringActionKind = enum null, dead, act
|
||||
DuringAction = object
|
||||
case kind: DuringActionKind
|
||||
of null, dead: discard
|
||||
of act:
|
||||
action: TurnAction
|
||||
DuringEntity {.final.}= ref object of Entity
|
||||
cb: DuringProc
|
||||
assertionMap: Table[Handle, DuringAction]
|
||||
|
||||
method publish(de: DuringEntity; turn: Turn; a: AssertionRef; h: Handle) =
|
||||
discard inFacet(turn) do (turn: Turn):
|
||||
let action = de.cb(turn, a.value, h)
|
||||
# assert(not action.isNil "should have put in a no-op action")
|
||||
let g = de.assertionMap.getOrDefault h
|
||||
case g.kind
|
||||
of null:
|
||||
de.assertionMap[h] = DuringAction(kind: act, action: action)
|
||||
of dead:
|
||||
de.assertionMap.del h
|
||||
action(turn)
|
||||
of act:
|
||||
raiseAssert("during: duplicate handle in publish: " & $h)
|
||||
|
||||
method retract(de: DuringEntity; turn: Turn; h: Handle) =
|
||||
let g = de.assertionMap.getOrDefault h
|
||||
case g.kind
|
||||
of null:
|
||||
de.assertionMap[h] = DuringAction(kind: dead)
|
||||
of dead:
|
||||
raiseAssert("during: duplicate handle in retract: " & $h)
|
||||
of act:
|
||||
de.assertionMap.del h
|
||||
if not g.action.isNil:
|
||||
g.action(turn)
|
||||
|
||||
proc during*(cb: DuringProc): DuringEntity = DuringEntity(cb: cb)
|
||||
|
||||
proc observe*(turn: Turn; ds: Cap; pat: Pattern; e: Entity): Handle =
|
||||
publish(turn, ds, Observe(pattern: pat, observer: newCap(turn, e)))
|
|
@ -1,4 +0,0 @@
|
|||
# SPDX-FileCopyrightText: 2021 ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
type EventKind* = enum addedEvent, removedEvent, messageEvent
|
|
@ -0,0 +1,45 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2022 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[hashes, tables]
|
||||
|
||||
import ./actors
|
||||
from ./protocols/sturdy import Oid
|
||||
|
||||
proc hash(r: Cap): Hash = !$(r.relay.hash !& r.target.unsafeAddr.hash)
|
||||
|
||||
type
|
||||
Membrane* = object
|
||||
## Bidirectional mapping between `Oid` and `Cap` values.
|
||||
## https://synit.org/book/protocol.html#membranes
|
||||
byOid: Table[Oid, WireSymbol]
|
||||
byCap: Table[Cap, WireSymbol]
|
||||
|
||||
WireSymbol* = ref object
|
||||
oid: Oid
|
||||
cap: Cap
|
||||
count: int
|
||||
|
||||
proc oid*(sym: WireSymbol): Oid = sym.oid
|
||||
proc cap*(sym: WireSymbol): Cap = sym.cap
|
||||
|
||||
proc grab*(mem: Membrane; key: Oid): WireSymbol =
|
||||
## Grab a `WireSymbol` from a `Membrane`.
|
||||
mem.byOid.getOrDefault(key)
|
||||
|
||||
proc grab*(mem: Membrane; key: Cap): WireSymbol =
|
||||
## Grab a `WireSymbol` from a `Membrane`.
|
||||
mem.byCap.getOrDefault(key)
|
||||
|
||||
proc drop*(mem: var Membrane; sym: WireSymbol) =
|
||||
## Drop a `WireSymbol` from a `Membrane`.
|
||||
dec sym.count
|
||||
if sym.count < 1:
|
||||
mem.byOid.del sym.oid
|
||||
mem.byCap.del sym.cap
|
||||
|
||||
proc newWireSymbol*(mem: var Membrane; o: Oid; r: Cap): WireSymbol =
|
||||
## Allocate a `WireSymbol` at a `Membrane`.
|
||||
result = WireSymbol(oid: o, cap: r, count: 1)
|
||||
mem.byOid[result.oid] = result
|
||||
mem.byCap[result.cap] = result
|
|
@ -0,0 +1,445 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[assertions, options, tables, typetraits]
|
||||
|
||||
import preserves
|
||||
import ./protocols/[dataspacePatterns, dataspace]
|
||||
from ./actors import Cap
|
||||
|
||||
export dataspacePatterns.`$`, AnyAtomKind, GroupTypeKind, PatternKind
|
||||
|
||||
type
|
||||
Pattern* = dataspacePatterns.Pattern
|
||||
|
||||
proc toPattern(b: sink PatternBind): Pattern =
|
||||
Pattern(orKind: PatternKind.`bind`, `bind`: b)
|
||||
|
||||
proc toPattern(l: sink PatternLit): Pattern =
|
||||
Pattern(orKind: PatternKind.`lit`, lit: l)
|
||||
|
||||
proc toPattern(g: sink PatternGroup): Pattern =
|
||||
Pattern(orKind: PatternKind.`group`, group: g)
|
||||
|
||||
proc toPattern(a: sink AnyAtom): Pattern =
|
||||
PatternLit(value: a).toPattern
|
||||
|
||||
proc grab*(p: sink Pattern): Pattern =
|
||||
PatternBind(pattern: p).toPattern
|
||||
|
||||
proc drop*(): Pattern = Pattern(orKind: PatternKind.`discard`)
|
||||
## Create a pattern to match any value without capture.
|
||||
|
||||
proc grab*(): Pattern = drop().grab()
|
||||
## Create a pattern to capture any value.
|
||||
|
||||
proc drop*(pr: Value): Pattern =
|
||||
## Convert a `Preserve` value to a `Pattern`.
|
||||
runnableExamples:
|
||||
from std/unittest import check
|
||||
import preserves
|
||||
check:
|
||||
$("""<foo "bar" #"00" [0 1 2.0] {maybe: #t} <_>>""".parsePreserves.drop) ==
|
||||
"""<group <rec foo> {0: <lit "bar"> 1: <lit #"00"> 2: <group <arr> {0: <lit 0> 1: <lit 1> 2: <lit 2.0>}> 3: <group <dict> {maybe: <lit #t>}> 4: <_>}>"""
|
||||
|
||||
case pr.kind
|
||||
of pkBoolean:
|
||||
AnyAtom(orKind: AnyAtomKind.`bool`, bool: pr.bool).toPattern
|
||||
of pkFloat:
|
||||
AnyAtom(orKind: AnyAtomKind.`double`, double: pr.float).toPattern
|
||||
of pkRegister:
|
||||
AnyAtom(orKind: AnyAtomKind.`int`, int: pr.register).toPattern
|
||||
of pkBigInt:
|
||||
raiseAssert "cannot make a pattern over a big integer"
|
||||
of pkString:
|
||||
AnyAtom(orKind: AnyAtomKind.`string`, string: pr.string).toPattern
|
||||
of pkByteString:
|
||||
AnyAtom(orKind: AnyAtomKind.`bytes`, bytes: pr.bytes).toPattern
|
||||
of pkSymbol:
|
||||
AnyAtom(orKind: AnyAtomKind.`symbol`, symbol: pr.symbol).toPattern
|
||||
|
||||
of pkRecord:
|
||||
if pr.isRecord("_", 0):
|
||||
drop()
|
||||
elif pr.isRecord("bind", 1):
|
||||
pr.fields[0].drop
|
||||
else:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.rec))
|
||||
group.`type`.rec.label = pr.label
|
||||
var i: int
|
||||
for v in pr.fields:
|
||||
group.entries[toPreserves i] = drop v
|
||||
inc i
|
||||
group.toPattern
|
||||
|
||||
of pkSequence:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.arr))
|
||||
for i, v in pr.sequence:
|
||||
group.entries[toPreserves i] = drop v
|
||||
group.toPattern
|
||||
|
||||
of pkSet:
|
||||
raiseAssert "cannot construct a pattern over a set literal"
|
||||
|
||||
of pkDictionary:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.dict))
|
||||
for key, val in pr.pairs:
|
||||
group.entries[key] = drop val
|
||||
group.toPattern
|
||||
|
||||
of pkEmbedded:
|
||||
if pr.embeddedRef.isNil: drop()
|
||||
else:
|
||||
AnyAtom(orKind: AnyAtomKind.`embedded`, embedded: pr.embeddedRef).toPattern
|
||||
#else:
|
||||
# raise newException(ValueError, "cannot generate a pattern for unhandled Value type")
|
||||
|
||||
proc drop*[T](x: T): Pattern =
|
||||
## Construct a `Pattern` from value of type `T`.
|
||||
## This proc is called `drop` because the value `x` is matched but discarded.
|
||||
runnableExamples:
|
||||
from std/unittest import check
|
||||
check:
|
||||
$drop(true) == "<lit #t>"
|
||||
$drop(3.14) == "<lit 3.14>"
|
||||
$drop([0, 1, 2, 3]) == "<group <arr> {0: <lit 0> 1: <lit 1> 2: <lit 2> 3: <lit 3>}>"
|
||||
drop(x.toPreserves)
|
||||
|
||||
proc grab*[T](x: T): Pattern {.
|
||||
deprecated: "use drop unless you wish to capture the provided value".} =
|
||||
PatternBind(pattern: drop x).toPattern
|
||||
|
||||
proc grabTypeFlat*(typ: static typedesc): Pattern =
|
||||
## Derive a `Pattern` from type `typ`.
|
||||
## This works for `tuple` and `object` types but in the
|
||||
## general case will return a wildcard binding.
|
||||
runnableExamples:
|
||||
import preserves
|
||||
from std/unittest import check
|
||||
check:
|
||||
$grabTypeFlat(array[3, int]) ==
|
||||
"""<group <arr> {0: <bind <_>> 1: <bind <_>> 2: <bind <_>> 3: <bind <_>>}>"""
|
||||
type
|
||||
Point = tuple[x: int; y: int]
|
||||
Rect {.preservesRecord: "rect".} = tuple[a: Point; B: Point]
|
||||
ColoredRect {.preservesDictionary.} = tuple[color: string; rect: Rect]
|
||||
check:
|
||||
$(grabTypeFlat Point) ==
|
||||
"<group <arr> {0: <bind <_>> 1: <bind <_>>}>"
|
||||
$(grabTypeFlat Rect) ==
|
||||
"<group <rec rect> {0: <group <arr> {0: <bind <_>> 1: <bind <_>>}> 1: <group <arr> {0: <bind <_>> 1: <bind <_>>}>}>"
|
||||
$(grabTypeFlat ColoredRect) ==
|
||||
"<group <dict> {color: <bind <_>> rect: <group <rec rect> {0: <group <arr> {0: <bind <_>> 1: <bind <_>>}> 1: <group <arr> {0: <bind <_>> 1: <bind <_>>}>}>}>"
|
||||
when typ is ref:
|
||||
grabTypeFlat(pointerBase(typ))
|
||||
elif typ.hasPreservesRecordPragma:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`rec`))
|
||||
group.`type`.rec.label = typ.recordLabel.toSymbol
|
||||
for _, f in fieldPairs(default typ):
|
||||
group.entries[group.entries.len.toPreserves] = grabTypeFlat(typeof f)
|
||||
group.toPattern
|
||||
elif typ.hasPreservesDictionaryPragma:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`dict`))
|
||||
for key, val in fieldPairs(default typ):
|
||||
group.entries[key.toSymbol] = grabTypeFlat(typeof val)
|
||||
group.toPattern
|
||||
elif typ is tuple:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`arr`))
|
||||
for _, f in fieldPairs(default typ):
|
||||
group.entries[group.entries.len.toPreserves] = grabTypeFlat(typeof f)
|
||||
group.toPattern
|
||||
elif typ is array:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`arr`))
|
||||
for i in 0..len(typ):
|
||||
group.entries[toPreserves i] = grab()
|
||||
group.toPattern
|
||||
else:
|
||||
grab()
|
||||
|
||||
proc fieldCount(T: typedesc): int =
|
||||
for _, _ in fieldPairs(default T):
|
||||
inc result
|
||||
|
||||
proc dropType*(typ: static typedesc): Pattern =
|
||||
## Derive a `Pattern` from type `typ` without any bindings.
|
||||
when typ is ref:
|
||||
dropType(pointerBase(typ))
|
||||
elif typ.hasPreservesRecordPragma:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`rec`))
|
||||
group.`type`.rec.label = typ.recordLabel.toSymbol
|
||||
let high = typ.fieldCount.pred
|
||||
if high >= 0: group.entries[high.toPreserves] = drop()
|
||||
group.toPattern
|
||||
elif typ.hasPreservesDictionaryPragma:
|
||||
PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`dict`)).toPattern
|
||||
elif typ is tuple or typ is array:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`arr`))
|
||||
let high = typ.fieldCount.pred
|
||||
if high >= 0: group.entries[high.toPreserves] = drop()
|
||||
group.toPattern
|
||||
else:
|
||||
drop()
|
||||
|
||||
proc grabType*(typ: static typedesc): Pattern =
|
||||
PatternBind(pattern: typ.dropType).toPattern
|
||||
|
||||
proc bindEntries(group: var PatternGroup; bindings: openArray[(int, Pattern)]) =
|
||||
## Set `bindings` for a `group`.
|
||||
for (i, pat) in bindings: group.entries[toPreserves i] = pat
|
||||
|
||||
proc grab*(typ: static typedesc; bindings: sink openArray[(int, Pattern)]): Pattern =
|
||||
## Construct a `Pattern` from type `typ` with pattern `bindings` by integer offset.
|
||||
when typ is ptr | ref:
|
||||
grab(pointerBase(typ), bindings)
|
||||
elif typ.hasPreservesRecordPragma:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`rec`))
|
||||
group.`type`.rec.label = typ.recordLabel.toSymbol
|
||||
bindEntries(group, bindings)
|
||||
group.toPattern
|
||||
elif typ is tuple:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`arr`))
|
||||
bindEntries(group, bindings)
|
||||
group.toPattern
|
||||
else:
|
||||
{.error: "grab with indexed bindings not implemented for " & $typ.}
|
||||
|
||||
proc grab*(typ: static typedesc; bindings: sink openArray[(Value, Pattern)]): Pattern =
|
||||
## Construct a `Pattern` from type `typ` with dictionary field `bindings`.
|
||||
when typ.hasPreservesDictionaryPragma:
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`dict`))
|
||||
for key, val in bindinds: group.entries[key] = val
|
||||
group.toPattern
|
||||
else:
|
||||
{.error: "grab with dictionary bindings not implemented for " & $typ.}
|
||||
|
||||
proc grabLit*(): Pattern =
|
||||
runnableExamples:
|
||||
from std/unittest import check
|
||||
check:
|
||||
$grabLit() == """<group <rec lit> {0: <bind <_>>}>"""
|
||||
grabTypeFlat(dataspacePatterns.PatternLit)
|
||||
|
||||
proc grabDict*(): Pattern =
|
||||
grabTypeFlat(dataspacePatterns.GroupTypeDict)
|
||||
|
||||
proc unpackLiterals*(pr: Value): Value =
|
||||
result = pr
|
||||
apply(result) do (pr: var Value):
|
||||
if pr.isRecord("lit", 1) or pr.isRecord("dict", 1) or pr.isRecord("arr", 1) or pr.isRecord("set", 1):
|
||||
pr = pr.record[0]
|
||||
|
||||
proc inject*(pattern: sink Pattern; p: Pattern; path: varargs[Value, toPreserves]): Pattern =
|
||||
## Inject `p` inside `pattern` at `path`.
|
||||
## Injects are made at offsets indexed by the discard (`<_>`) patterns in `pat`.
|
||||
proc inject(pat: var Pattern; path: openarray[Value]) =
|
||||
if len(path) == 0:
|
||||
pat = p
|
||||
elif pat.orKind != PatternKind.`group`:
|
||||
raise newException(ValueError, "cannot inject along specified path")
|
||||
else:
|
||||
inject(pat.group.entries[path[0]], path[1..path.high])
|
||||
result = pattern
|
||||
inject(result, path)
|
||||
|
||||
proc grabRecord*(label: Value, fields: varargs[Pattern]): Pattern =
|
||||
runnableExamples:
|
||||
from std/unittest import check
|
||||
import preserves
|
||||
check:
|
||||
$grabRecord("Says".toSymbol, grab(), grab()) ==
|
||||
"""<group <rec Says> {0: <bind <_>> 1: <bind <_>>}>"""
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`rec`))
|
||||
group.`type`.rec.label = label
|
||||
for i, f in fields: group.entries[toPreserves i] = f
|
||||
group.toPattern
|
||||
|
||||
proc grabRecord*(label: Value, fields: sink openArray[(int, Pattern)]): Pattern =
|
||||
runnableExamples:
|
||||
from std/unittest import check
|
||||
import preserves
|
||||
check:
|
||||
$grabRecord("Says".toSymbol, {3: grab(), 4: grab()}) ==
|
||||
"""<group <rec Says> {3: <bind <_>> 4: <bind <_>>}>"""
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`rec`))
|
||||
group.`type`.rec.label = label
|
||||
for (i, p) in fields: group.entries[toPreserves i] = p
|
||||
group.toPattern
|
||||
|
||||
proc grabRecord*(label: string, fields: varargs[Pattern]): Pattern =
|
||||
## Sugar for creating record patterns.
|
||||
## `label` is converted to a symbol value.
|
||||
grabRecord(label.toSymbol, fields)
|
||||
|
||||
proc grabDictionary*(bindings: sink openArray[(Value, Pattern)]): Pattern =
|
||||
## Construct a pattern that grabs some dictionary pairs.
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`dict`))
|
||||
for (key, val) in bindings: group.entries[key] = val
|
||||
group.toPattern
|
||||
|
||||
proc grabDictionary*(bindings: sink openArray[(string, Pattern)]): Pattern =
|
||||
## Construct a pattern that grabs some dictionary pairs.
|
||||
## Keys are converted from strings to symbols.
|
||||
var group = PatternGroup(`type`: GroupType(orKind: GroupTypeKind.`dict`))
|
||||
for (key, val) in bindings: group.entries[toSymbol key] = val
|
||||
group.toPattern
|
||||
|
||||
proc depattern(group: PatternGroup; values: var seq[Value]; index: var int): Value
|
||||
|
||||
proc depattern(pat: Pattern; values: var seq[Value]; index: var int): Value =
|
||||
case pat.orKind
|
||||
of PatternKind.`discard`:
|
||||
discard
|
||||
of PatternKind.`bind`:
|
||||
if index < values.len:
|
||||
result = move values[index]
|
||||
inc index
|
||||
of PatternKind.`lit`:
|
||||
result = pat.`lit`.value.toPreserves
|
||||
of PatternKind.`group`:
|
||||
result = depattern(pat.group, values, index)
|
||||
|
||||
proc depattern(group: PatternGroup; values: var seq[Value]; index: var int): Value =
|
||||
case group.`type`.orKind
|
||||
of GroupTypeKind.rec:
|
||||
result = initRecord(group.`type`.rec.label, group.entries.len)
|
||||
var i: int
|
||||
for key, val in group.entries:
|
||||
if i.fromPreserves key:
|
||||
result[i] = depattern(val, values, index)
|
||||
of GroupTypeKind.arr:
|
||||
result = initSequence(group.entries.len)
|
||||
var i: int
|
||||
for key, val in group.entries:
|
||||
if i.fromPreserves key:
|
||||
result[i] = depattern(val, values, index)
|
||||
of GroupTypeKind.dict:
|
||||
result = initDictionary(Cap)
|
||||
for key, val in group.entries:
|
||||
result[key] = depattern(val, values, index)
|
||||
|
||||
proc depattern*(pat: Pattern; values: sink seq[Value]): Value =
|
||||
## Convert a `Pattern` to a `Value` while replacing binds with `values`.
|
||||
runnableExamples:
|
||||
from std/unittest import check
|
||||
import preserves
|
||||
type Foo {.preservesRecord: "foo".} = object
|
||||
a, b: int
|
||||
let pat = grabTypeFlat Foo
|
||||
let val = depattern(pat, @[1.toPreserves, 5.toPreserves])
|
||||
check $val == "<foo 1 5>"
|
||||
var index: int
|
||||
depattern(pat, values, index)
|
||||
|
||||
type Literal*[T] = object
|
||||
## A wrapper type to deserialize patterns to native values.
|
||||
value*: T
|
||||
|
||||
proc fromPreservesHook*[T](lit: var Literal[T]; pr: Value): bool =
|
||||
var pat: Pattern
|
||||
pat.fromPreserves(pr) and lit.value.fromPreserves(depattern(pat, @[]))
|
||||
|
||||
proc toPreservesHook*[T](lit: Literal[T]): Value =
|
||||
lit.value.grab.toPreserves
|
||||
|
||||
func isGroup(pat: Pattern): bool =
|
||||
pat.orKind == PatternKind.`group`
|
||||
|
||||
func isMetaDict(pat: Pattern): bool =
|
||||
pat.orKind == PatternKind.`group` and
|
||||
pat.group.type.orKind == GroupTypeKind.dict
|
||||
|
||||
proc metaApply(result: var Pattern; pat: Pattern; path: openarray[Value], offset: int) =
|
||||
if offset == path.len:
|
||||
result = pat
|
||||
elif result.isGroup and result.group.entries[1.toPreserves].isMetaDict:
|
||||
if offset == path.high:
|
||||
result.group.entries[1.toPreserves].group.entries[path[offset]] = pat
|
||||
else:
|
||||
metaApply(result.group.entries[1.toPreserves].group.entries[path[offset]], pat, path, succ offset)
|
||||
else:
|
||||
assert result.isGroup, "non-group: " & $result
|
||||
assert result.group.entries[1.toPreserves].isMetaDict, "non-meta-dict: " & $result.group.entries[1.toPreserves]
|
||||
raise newException(ValueError, "cannot inject into non-group pattern " & $result)
|
||||
|
||||
proc observePattern*(pat: Pattern; injects: openarray[(seq[Value], Pattern)]): Pattern =
|
||||
result = dropType Observe
|
||||
var meta = pat.toPreserves.drop
|
||||
for (path, pat) in injects:
|
||||
metaApply(meta, pat, path, 0)
|
||||
result.group.entries[0.toPreserves] = meta
|
||||
|
||||
type
|
||||
Path* = seq[Value]
|
||||
Paths* = seq[Path]
|
||||
Captures* = seq[Value]
|
||||
Analysis* = tuple
|
||||
presentPaths: Paths
|
||||
constPaths: Paths
|
||||
constValues: seq[Value]
|
||||
capturePaths: Paths
|
||||
|
||||
func walk(result: var Analysis; path: var Path; p: Pattern)
|
||||
|
||||
func walk(result: var Analysis; path: var Path; key: Value; pat: Pattern) =
|
||||
path.add(key)
|
||||
walk(result, path, pat)
|
||||
discard path.pop
|
||||
|
||||
func walk(result: var Analysis; path: var Path; p: Pattern) =
|
||||
case p.orKind
|
||||
of PatternKind.group:
|
||||
for k, v in p.group.entries: walk(result, path, k, v)
|
||||
of PatternKind.`bind`:
|
||||
result.capturePaths.add(path)
|
||||
walk(result, path, p.`bind`.pattern)
|
||||
of PatternKind.`discard`:
|
||||
result.presentPaths.add(path)
|
||||
of PatternKind.`lit`:
|
||||
result.constPaths.add(path)
|
||||
result.constValues.add(p.`lit`.value.toPreserves)
|
||||
|
||||
func analyse*(p: Pattern): Analysis =
|
||||
var path: Path
|
||||
walk(result, path, p)
|
||||
|
||||
func checkPresence*(v: Value; present: Paths): bool =
|
||||
result = true
|
||||
for path in present:
|
||||
if not result: break
|
||||
result = step(v, path).isSome
|
||||
|
||||
func projectPaths*(v: Value; paths: Paths): Option[Captures] =
|
||||
var res = newSeq[Value](paths.len)
|
||||
for i, path in paths:
|
||||
var vv = step(v, path)
|
||||
if vv.isSome: res[i] = get(vv)
|
||||
else: return
|
||||
some res
|
||||
|
||||
proc matches*(pat: Pattern; pr: Value): bool =
|
||||
let analysis = analyse(pat)
|
||||
assert analysis.constPaths.len == analysis.constValues.len
|
||||
result = checkPresence(pr, analysis.presentPaths)
|
||||
if result:
|
||||
for i, path in analysis.constPaths:
|
||||
let v = step(pr, path)
|
||||
if v.isNone: return false
|
||||
if analysis.constValues[i] != v.get: return false
|
||||
for path in analysis.capturePaths:
|
||||
if step(pr, path).isNone: return false
|
||||
|
||||
proc capture*(pat: Pattern; pr: Value): seq[Value] =
|
||||
let analysis = analyse(pat)
|
||||
assert analysis.constPaths.len == analysis.constValues.len
|
||||
if checkPresence(pr, analysis.presentPaths):
|
||||
for i, path in analysis.constPaths:
|
||||
let v = step(pr, path)
|
||||
if v.isNone : return @[]
|
||||
if analysis.constValues[i] != v.get: return @[]
|
||||
for path in analysis.capturePaths:
|
||||
let v = step(pr, path)
|
||||
if v.isNone: return @[]
|
||||
result.add(get v)
|
||||
|
||||
when isMainModule:
|
||||
stdout.writeLine stdin.readAll.parsePreserves.grab
|
|
@ -1,43 +0,0 @@
|
|||
# SPDX-FileCopyrightText: 2021 ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import nimSHA2
|
||||
|
||||
proc fillPad(pad: var openarray[byte]; key: openarray[byte]; fillByte: byte) =
|
||||
for i in 0..key.high: pad[i] = fillByte xor key[i].uint8
|
||||
for i in key.len..pad.high: pad[i] = fillByte
|
||||
|
||||
proc hmacSha256*[T:char|byte](key: openarray[byte]; msg: openarray[T]; outLength = 32): seq[byte] =
|
||||
const blockSize = 64
|
||||
assert(outLength <= 32)
|
||||
var
|
||||
hash: SHA256
|
||||
pad: array[blockSize, byte]
|
||||
block:
|
||||
const xorByte = 0x36'u8
|
||||
if key.len < blockSize:
|
||||
fillPad(pad, key, xorByte)
|
||||
else:
|
||||
initSHA(hash)
|
||||
update(hash, key)
|
||||
var keyDigest = final(hash)
|
||||
fillPad(pad, keyDigest, xorByte)
|
||||
initSHA(hash)
|
||||
update(hash, pad)
|
||||
update(hash, msg)
|
||||
var digest = final(hash)
|
||||
block:
|
||||
const xorByte = 0x5c'u8
|
||||
if key.len < blockSize:
|
||||
fillPad(pad, key, xorByte)
|
||||
else:
|
||||
initSHA(hash)
|
||||
update(hash, key)
|
||||
var keyDigest = final(hash)
|
||||
fillPad(pad, keyDigest, xorByte)
|
||||
initSHA(hash)
|
||||
update(hash, pad)
|
||||
update(hash, digest)
|
||||
digest = final(hash)
|
||||
result.setLen(outLength)
|
||||
copyMem(result[0].addr, digest[0].addr, result.len)
|
|
@ -1,8 +0,0 @@
|
|||
all: schema-bundle.bin
|
||||
|
||||
clean:
|
||||
rm -f schema-bundle.bin
|
||||
|
||||
schema-bundle.bin: schemas/*.prs
|
||||
preserves-schemac schemas/*.prs > $@.tmp
|
||||
mv $@.tmp $@
|
|
@ -0,0 +1,22 @@
|
|||
include_rules
|
||||
modules += dataspace.nim
|
||||
modules += dataspacePatterns.nim
|
||||
modules += gatekeeper.nim
|
||||
modules += http.nim
|
||||
modules += noise.nim
|
||||
modules += protocol.nim
|
||||
modules += service.nim
|
||||
modules += stdenv.nim
|
||||
modules += stream.nim
|
||||
modules += sturdy.nim
|
||||
modules += tcp.nim
|
||||
modules += timer.nim
|
||||
modules += trace.nim
|
||||
modules += transportAddress.nim
|
||||
modules += worker.nim
|
||||
|
||||
: ../../../../syndicate-protocols/schema-bundle.bin \
|
||||
|> !preserves_schema_nim \
|
||||
|> $(modules) | $(SYNDICATE_PROTOCOL)
|
||||
|
||||
: foreach $(modules) | $(modules) |> !nim_check |>
|
|
@ -0,0 +1,14 @@
|
|||
|
||||
import
|
||||
preserves, dataspacePatterns
|
||||
|
||||
type
|
||||
Observe* {.preservesRecord: "Observe".} = object
|
||||
`pattern`*: dataspacePatterns.Pattern
|
||||
`observer`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
proc `$`*(x: Observe): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Observe): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,86 @@
|
|||
|
||||
import
|
||||
preserves, std/tables
|
||||
|
||||
type
|
||||
AnyAtomKind* {.pure.} = enum
|
||||
`bool`, `double`, `int`, `string`, `bytes`, `symbol`, `embedded`
|
||||
`AnyAtom`* {.preservesOr.} = object
|
||||
case orKind*: AnyAtomKind
|
||||
of AnyAtomKind.`bool`:
|
||||
`bool`*: bool
|
||||
|
||||
of AnyAtomKind.`double`:
|
||||
`double`*: float
|
||||
|
||||
of AnyAtomKind.`int`:
|
||||
`int`*: BiggestInt
|
||||
|
||||
of AnyAtomKind.`string`:
|
||||
`string`*: string
|
||||
|
||||
of AnyAtomKind.`bytes`:
|
||||
`bytes`*: seq[byte]
|
||||
|
||||
of AnyAtomKind.`symbol`:
|
||||
`symbol`*: Symbol
|
||||
|
||||
of AnyAtomKind.`embedded`:
|
||||
`embedded`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
|
||||
GroupTypeKind* {.pure.} = enum
|
||||
`rec`, `arr`, `dict`
|
||||
GroupTypeRec* {.preservesRecord: "rec".} = object
|
||||
`label`*: Value
|
||||
|
||||
GroupTypeArr* {.preservesRecord: "arr".} = object
|
||||
|
||||
GroupTypeDict* {.preservesRecord: "dict".} = object
|
||||
|
||||
`GroupType`* {.preservesOr.} = object
|
||||
case orKind*: GroupTypeKind
|
||||
of GroupTypeKind.`rec`:
|
||||
`rec`*: GroupTypeRec
|
||||
|
||||
of GroupTypeKind.`arr`:
|
||||
`arr`*: GroupTypeArr
|
||||
|
||||
of GroupTypeKind.`dict`:
|
||||
`dict`*: GroupTypeDict
|
||||
|
||||
|
||||
PatternKind* {.pure.} = enum
|
||||
`discard`, `bind`, `lit`, `group`
|
||||
PatternDiscard* {.preservesRecord: "_".} = object
|
||||
|
||||
PatternBind* {.preservesRecord: "bind".} = object
|
||||
`pattern`*: Pattern
|
||||
|
||||
PatternLit* {.preservesRecord: "lit".} = object
|
||||
`value`*: AnyAtom
|
||||
|
||||
PatternGroup* {.preservesRecord: "group".} = object
|
||||
`type`*: GroupType
|
||||
`entries`*: Table[Value, Pattern]
|
||||
|
||||
`Pattern`* {.acyclic, preservesOr.} = ref object
|
||||
case orKind*: PatternKind
|
||||
of PatternKind.`discard`:
|
||||
`discard`*: PatternDiscard
|
||||
|
||||
of PatternKind.`bind`:
|
||||
`bind`* {.preservesEmbedded.}: PatternBind
|
||||
|
||||
of PatternKind.`lit`:
|
||||
`lit`* {.preservesEmbedded.}: PatternLit
|
||||
|
||||
of PatternKind.`group`:
|
||||
`group`* {.preservesEmbedded.}: PatternGroup
|
||||
|
||||
|
||||
proc `$`*(x: AnyAtom | GroupType | Pattern): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: AnyAtom | GroupType | Pattern): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,103 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
Bind* {.preservesRecord: "bind".} = object
|
||||
`description`*: Description
|
||||
`target`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`observer`*: BindObserver
|
||||
|
||||
Route* {.preservesRecord: "route".} = object
|
||||
`transports`*: seq[Value]
|
||||
`pathSteps`* {.preservesTupleTail.}: seq[PathStep]
|
||||
|
||||
BindObserverKind* {.pure.} = enum
|
||||
`present`, `absent`
|
||||
`BindObserver`* {.preservesOr.} = object
|
||||
case orKind*: BindObserverKind
|
||||
of BindObserverKind.`present`:
|
||||
`present`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
of BindObserverKind.`absent`:
|
||||
`absent`* {.preservesLiteral: "#f".}: bool
|
||||
|
||||
|
||||
TransportConnection* {.preservesRecord: "connect-transport".} = object
|
||||
`addr`*: Value
|
||||
`control`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`resolved`*: Resolved
|
||||
|
||||
Step* = Value
|
||||
ResolvedPathStep* {.preservesRecord: "path-step".} = object
|
||||
`origin`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`pathStep`*: PathStep
|
||||
`resolved`*: Resolved
|
||||
|
||||
BoundKind* {.pure.} = enum
|
||||
`bound`, `Rejected`
|
||||
BoundBound* {.preservesRecord: "bound".} = object
|
||||
`pathStep`*: PathStep
|
||||
|
||||
`Bound`* {.preservesOr.} = object
|
||||
case orKind*: BoundKind
|
||||
of BoundKind.`bound`:
|
||||
`bound`*: BoundBound
|
||||
|
||||
of BoundKind.`Rejected`:
|
||||
`rejected`*: Rejected
|
||||
|
||||
|
||||
ForceDisconnect* {.preservesRecord: "force-disconnect".} = object
|
||||
|
||||
Description* = Value
|
||||
Rejected* {.preservesRecord: "rejected".} = object
|
||||
`detail`*: Value
|
||||
|
||||
Resolve* {.preservesRecord: "resolve".} = object
|
||||
`step`*: Step
|
||||
`observer`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
ResolvedKind* {.pure.} = enum
|
||||
`accepted`, `Rejected`
|
||||
ResolvedAccepted* {.preservesRecord: "accepted".} = object
|
||||
`responderSession`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
`Resolved`* {.preservesOr.} = object
|
||||
case orKind*: ResolvedKind
|
||||
of ResolvedKind.`accepted`:
|
||||
`accepted`* {.preservesEmbedded.}: ResolvedAccepted
|
||||
|
||||
of ResolvedKind.`Rejected`:
|
||||
`rejected`*: Rejected
|
||||
|
||||
|
||||
TransportControl* = ForceDisconnect
|
||||
ResolvePath* {.preservesRecord: "resolve-path".} = object
|
||||
`route`*: Route
|
||||
`addr`*: Value
|
||||
`control`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`resolved`*: Resolved
|
||||
|
||||
PathStep* = Value
|
||||
proc `$`*(x: Bind | Route | BindObserver | TransportConnection |
|
||||
ResolvedPathStep |
|
||||
Bound |
|
||||
ForceDisconnect |
|
||||
Rejected |
|
||||
Resolve |
|
||||
Resolved |
|
||||
TransportControl |
|
||||
ResolvePath): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Bind | Route | BindObserver | TransportConnection |
|
||||
ResolvedPathStep |
|
||||
Bound |
|
||||
ForceDisconnect |
|
||||
Rejected |
|
||||
Resolve |
|
||||
Resolved |
|
||||
TransportControl |
|
||||
ResolvePath): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,182 @@
|
|||
|
||||
import
|
||||
preserves, std/tables
|
||||
|
||||
type
|
||||
HostPatternKind* {.pure.} = enum
|
||||
`host`, `any`
|
||||
`HostPattern`* {.preservesOr.} = object
|
||||
case orKind*: HostPatternKind
|
||||
of HostPatternKind.`host`:
|
||||
`host`*: string
|
||||
|
||||
of HostPatternKind.`any`:
|
||||
`any`* {.preservesLiteral: "#f".}: bool
|
||||
|
||||
|
||||
HttpListener* {.preservesRecord: "http-listener".} = object
|
||||
`port`*: BiggestInt
|
||||
|
||||
MethodPatternKind* {.pure.} = enum
|
||||
`any`, `specific`
|
||||
`MethodPattern`* {.preservesOr.} = object
|
||||
case orKind*: MethodPatternKind
|
||||
of MethodPatternKind.`any`:
|
||||
`any`* {.preservesLiteral: "#f".}: bool
|
||||
|
||||
of MethodPatternKind.`specific`:
|
||||
`specific`*: Symbol
|
||||
|
||||
|
||||
MimeType* = Symbol
|
||||
QueryValueKind* {.pure.} = enum
|
||||
`string`, `file`
|
||||
QueryValueFile* {.preservesRecord: "file".} = object
|
||||
`filename`*: string
|
||||
`headers`*: Headers
|
||||
`body`*: seq[byte]
|
||||
|
||||
`QueryValue`* {.preservesOr.} = object
|
||||
case orKind*: QueryValueKind
|
||||
of QueryValueKind.`string`:
|
||||
`string`*: string
|
||||
|
||||
of QueryValueKind.`file`:
|
||||
`file`*: QueryValueFile
|
||||
|
||||
|
||||
HttpRequest* {.preservesRecord: "http-request".} = object
|
||||
`sequenceNumber`*: BiggestInt
|
||||
`host`*: RequestHost
|
||||
`port`*: BiggestInt
|
||||
`method`*: Symbol
|
||||
`path`*: seq[string]
|
||||
`headers`*: Headers
|
||||
`query`*: Table[Symbol, seq[QueryValue]]
|
||||
`body`*: RequestBody
|
||||
|
||||
RequestBodyKind* {.pure.} = enum
|
||||
`absent`, `present`
|
||||
`RequestBody`* {.preservesOr.} = object
|
||||
case orKind*: RequestBodyKind
|
||||
of RequestBodyKind.`absent`:
|
||||
`absent`* {.preservesLiteral: "#f".}: bool
|
||||
|
||||
of RequestBodyKind.`present`:
|
||||
`present`*: seq[byte]
|
||||
|
||||
|
||||
Headers* = Table[Symbol, string]
|
||||
HttpResponseKind* {.pure.} = enum
|
||||
`status`, `header`, `chunk`, `done`
|
||||
HttpResponseStatus* {.preservesRecord: "status".} = object
|
||||
`code`*: BiggestInt
|
||||
`message`*: string
|
||||
|
||||
HttpResponseHeader* {.preservesRecord: "header".} = object
|
||||
`name`*: Symbol
|
||||
`value`*: string
|
||||
|
||||
HttpResponseChunk* {.preservesRecord: "chunk".} = object
|
||||
`chunk`*: Chunk
|
||||
|
||||
HttpResponseDone* {.preservesRecord: "done".} = object
|
||||
`chunk`*: Chunk
|
||||
|
||||
`HttpResponse`* {.preservesOr.} = object
|
||||
case orKind*: HttpResponseKind
|
||||
of HttpResponseKind.`status`:
|
||||
`status`*: HttpResponseStatus
|
||||
|
||||
of HttpResponseKind.`header`:
|
||||
`header`*: HttpResponseHeader
|
||||
|
||||
of HttpResponseKind.`chunk`:
|
||||
`chunk`*: HttpResponseChunk
|
||||
|
||||
of HttpResponseKind.`done`:
|
||||
`done`*: HttpResponseDone
|
||||
|
||||
|
||||
HttpService* {.preservesRecord: "http-service".} = object
|
||||
`host`*: HostPattern
|
||||
`port`*: BiggestInt
|
||||
`method`*: MethodPattern
|
||||
`path`*: PathPattern
|
||||
|
||||
HttpBinding* {.preservesRecord: "http-bind".} = object
|
||||
`host`*: HostPattern
|
||||
`port`*: BiggestInt
|
||||
`method`*: MethodPattern
|
||||
`path`*: PathPattern
|
||||
`handler`* {.preservesEmbedded.}: Value
|
||||
|
||||
HttpContext* {.preservesRecord: "request".} = object
|
||||
`req`*: HttpRequest
|
||||
`res`* {.preservesEmbedded.}: Value
|
||||
|
||||
RequestHostKind* {.pure.} = enum
|
||||
`absent`, `present`
|
||||
`RequestHost`* {.preservesOr.} = object
|
||||
case orKind*: RequestHostKind
|
||||
of RequestHostKind.`absent`:
|
||||
`absent`* {.preservesLiteral: "#f".}: bool
|
||||
|
||||
of RequestHostKind.`present`:
|
||||
`present`*: string
|
||||
|
||||
|
||||
PathPatternElementKind* {.pure.} = enum
|
||||
`label`, `wildcard`, `rest`
|
||||
`PathPatternElement`* {.preservesOr.} = object
|
||||
case orKind*: PathPatternElementKind
|
||||
of PathPatternElementKind.`label`:
|
||||
`label`*: string
|
||||
|
||||
of PathPatternElementKind.`wildcard`:
|
||||
`wildcard`* {.preservesLiteral: "_".}: bool
|
||||
|
||||
of PathPatternElementKind.`rest`:
|
||||
`rest`* {.preservesLiteral: "|...|".}: bool
|
||||
|
||||
|
||||
ChunkKind* {.pure.} = enum
|
||||
`string`, `bytes`
|
||||
`Chunk`* {.preservesOr.} = object
|
||||
case orKind*: ChunkKind
|
||||
of ChunkKind.`string`:
|
||||
`string`*: string
|
||||
|
||||
of ChunkKind.`bytes`:
|
||||
`bytes`*: seq[byte]
|
||||
|
||||
|
||||
PathPattern* = seq[PathPatternElement]
|
||||
proc `$`*(x: HostPattern | HttpListener | MethodPattern | MimeType | QueryValue |
|
||||
HttpRequest |
|
||||
RequestBody |
|
||||
Headers |
|
||||
HttpResponse |
|
||||
HttpService |
|
||||
HttpBinding |
|
||||
HttpContext |
|
||||
RequestHost |
|
||||
PathPatternElement |
|
||||
Chunk |
|
||||
PathPattern): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: HostPattern | HttpListener | MethodPattern | MimeType |
|
||||
QueryValue |
|
||||
HttpRequest |
|
||||
RequestBody |
|
||||
Headers |
|
||||
HttpResponse |
|
||||
HttpService |
|
||||
HttpBinding |
|
||||
HttpContext |
|
||||
RequestHost |
|
||||
PathPatternElement |
|
||||
Chunk |
|
||||
PathPattern): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,139 @@
|
|||
|
||||
import
|
||||
preserves, std/options
|
||||
|
||||
type
|
||||
NoiseDescriptionDetail* = NoiseServiceSpec
|
||||
NoisePreSharedKeysKind* {.pure.} = enum
|
||||
`present`, `invalid`, `absent`
|
||||
NoisePreSharedKeysPresent* {.preservesDictionary.} = object
|
||||
`preSharedKeys`*: seq[seq[byte]]
|
||||
|
||||
NoisePreSharedKeysInvalid* {.preservesDictionary.} = object
|
||||
`preSharedKeys`*: Value
|
||||
|
||||
NoisePreSharedKeysAbsent* {.preservesDictionary.} = object
|
||||
|
||||
`NoisePreSharedKeys`* {.preservesOr.} = object
|
||||
case orKind*: NoisePreSharedKeysKind
|
||||
of NoisePreSharedKeysKind.`present`:
|
||||
`present`*: NoisePreSharedKeysPresent
|
||||
|
||||
of NoisePreSharedKeysKind.`invalid`:
|
||||
`invalid`*: NoisePreSharedKeysInvalid
|
||||
|
||||
of NoisePreSharedKeysKind.`absent`:
|
||||
`absent`*: NoisePreSharedKeysAbsent
|
||||
|
||||
|
||||
SecretKeyFieldKind* {.pure.} = enum
|
||||
`present`, `invalid`, `absent`
|
||||
SecretKeyFieldPresent* {.preservesDictionary.} = object
|
||||
`secretKey`*: seq[byte]
|
||||
|
||||
SecretKeyFieldInvalid* {.preservesDictionary.} = object
|
||||
`secretKey`*: Value
|
||||
|
||||
SecretKeyFieldAbsent* {.preservesDictionary.} = object
|
||||
|
||||
`SecretKeyField`* {.preservesOr.} = object
|
||||
case orKind*: SecretKeyFieldKind
|
||||
of SecretKeyFieldKind.`present`:
|
||||
`present`*: SecretKeyFieldPresent
|
||||
|
||||
of SecretKeyFieldKind.`invalid`:
|
||||
`invalid`*: SecretKeyFieldInvalid
|
||||
|
||||
of SecretKeyFieldKind.`absent`:
|
||||
`absent`*: SecretKeyFieldAbsent
|
||||
|
||||
|
||||
SessionItemKind* {.pure.} = enum
|
||||
`Initiator`, `Packet`
|
||||
`SessionItem`* {.preservesOr.} = object
|
||||
case orKind*: SessionItemKind
|
||||
of SessionItemKind.`Initiator`:
|
||||
`initiator`* {.preservesEmbedded.}: Initiator
|
||||
|
||||
of SessionItemKind.`Packet`:
|
||||
`packet`*: Packet
|
||||
|
||||
|
||||
NoiseProtocolKind* {.pure.} = enum
|
||||
`present`, `invalid`, `absent`
|
||||
NoiseProtocolPresent* {.preservesDictionary.} = object
|
||||
`protocol`*: string
|
||||
|
||||
NoiseProtocolInvalid* {.preservesDictionary.} = object
|
||||
`protocol`*: Value
|
||||
|
||||
NoiseProtocolAbsent* {.preservesDictionary.} = object
|
||||
|
||||
`NoiseProtocol`* {.preservesOr.} = object
|
||||
case orKind*: NoiseProtocolKind
|
||||
of NoiseProtocolKind.`present`:
|
||||
`present`*: NoiseProtocolPresent
|
||||
|
||||
of NoiseProtocolKind.`invalid`:
|
||||
`invalid`*: NoiseProtocolInvalid
|
||||
|
||||
of NoiseProtocolKind.`absent`:
|
||||
`absent`*: NoiseProtocolAbsent
|
||||
|
||||
|
||||
NoisePathStepDetail* = NoiseSpec
|
||||
NoiseServiceSpecKey* = seq[byte]
|
||||
NoiseServiceSpecPreSharedKeys* = Option[Value]
|
||||
NoiseServiceSpecProtocol* = Option[Value]
|
||||
NoiseServiceSpecSecretKey* = Option[Value]
|
||||
`NoiseServiceSpec`* {.preservesDictionary.} = object
|
||||
`key`*: seq[byte]
|
||||
`preSharedKeys`*: Option[Value]
|
||||
`protocol`*: Option[Value]
|
||||
`secretKey`*: Option[Value]
|
||||
`service`*: ServiceSelector
|
||||
|
||||
ServiceSelector* = Value
|
||||
Initiator* {.preservesRecord: "initiator".} = object
|
||||
`initiatorSession`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
NoiseStepDetail* = ServiceSelector
|
||||
NoiseSpecKey* = seq[byte]
|
||||
NoiseSpecPreSharedKeys* = Option[Value]
|
||||
NoiseSpecProtocol* = Option[Value]
|
||||
`NoiseSpec`* {.preservesDictionary.} = object
|
||||
`key`*: seq[byte]
|
||||
`preSharedKeys`*: Option[Value]
|
||||
`protocol`*: Option[Value]
|
||||
`service`*: ServiceSelector
|
||||
|
||||
PacketKind* {.pure.} = enum
|
||||
`complete`, `fragmented`
|
||||
`Packet`* {.preservesOr.} = object
|
||||
case orKind*: PacketKind
|
||||
of PacketKind.`complete`:
|
||||
`complete`*: seq[byte]
|
||||
|
||||
of PacketKind.`fragmented`:
|
||||
`fragmented`*: seq[seq[byte]]
|
||||
|
||||
|
||||
proc `$`*(x: NoiseDescriptionDetail | NoisePreSharedKeys | SecretKeyField |
|
||||
SessionItem |
|
||||
NoiseProtocol |
|
||||
NoisePathStepDetail |
|
||||
NoiseServiceSpec |
|
||||
Initiator |
|
||||
NoiseSpec |
|
||||
Packet): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: NoiseDescriptionDetail | NoisePreSharedKeys | SecretKeyField |
|
||||
SessionItem |
|
||||
NoiseProtocol |
|
||||
NoisePathStepDetail |
|
||||
NoiseServiceSpec |
|
||||
Initiator |
|
||||
NoiseSpec |
|
||||
Packet): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,77 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
Error* {.preservesRecord: "error".} = object
|
||||
`message`*: string
|
||||
`detail`*: Value
|
||||
|
||||
Turn* = seq[TurnEvent]
|
||||
Message* {.preservesRecord: "M".} = object
|
||||
`body`*: Assertion
|
||||
|
||||
Retract* {.preservesRecord: "R".} = object
|
||||
`handle`*: Handle
|
||||
|
||||
Assert* {.preservesRecord: "A".} = object
|
||||
`assertion`*: Assertion
|
||||
`handle`*: Handle
|
||||
|
||||
Extension* = Value
|
||||
Sync* {.preservesRecord: "S".} = object
|
||||
`peer`* {.preservesEmbedded.}: Value
|
||||
|
||||
TurnEvent* {.preservesTuple.} = object
|
||||
`oid`*: Oid
|
||||
`event`*: Event
|
||||
|
||||
Oid* = BiggestInt
|
||||
Assertion* = Value
|
||||
Handle* = BiggestInt
|
||||
PacketKind* {.pure.} = enum
|
||||
`Turn`, `Error`, `Extension`, `Nop`
|
||||
`Packet`* {.preservesOr.} = object
|
||||
case orKind*: PacketKind
|
||||
of PacketKind.`Turn`:
|
||||
`turn`* {.preservesEmbedded.}: Turn
|
||||
|
||||
of PacketKind.`Error`:
|
||||
`error`*: Error
|
||||
|
||||
of PacketKind.`Extension`:
|
||||
`extension`*: Extension
|
||||
|
||||
of PacketKind.`Nop`:
|
||||
`nop`* {.preservesLiteral: "#f".}: bool
|
||||
|
||||
|
||||
EventKind* {.pure.} = enum
|
||||
`Assert`, `Retract`, `Message`, `Sync`
|
||||
`Event`* {.preservesOr.} = object
|
||||
case orKind*: EventKind
|
||||
of EventKind.`Assert`:
|
||||
`assert`*: Assert
|
||||
|
||||
of EventKind.`Retract`:
|
||||
`retract`*: Retract
|
||||
|
||||
of EventKind.`Message`:
|
||||
`message`*: Message
|
||||
|
||||
of EventKind.`Sync`:
|
||||
`sync`* {.preservesEmbedded.}: Sync
|
||||
|
||||
|
||||
proc `$`*(x: Error | Turn | Message | Retract | Assert | Sync | TurnEvent | Oid |
|
||||
Handle |
|
||||
Packet |
|
||||
Event): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Error | Turn | Message | Retract | Assert | Sync | TurnEvent |
|
||||
Oid |
|
||||
Handle |
|
||||
Packet |
|
||||
Event): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -1,14 +0,0 @@
|
|||
´³bundle·µ³tcp„´³schema·³version‘³definitions·³TcpLocal´³rec´³lit³ tcp-local„´³tupleµ´³named³host´³atom³String„„´³named³port´³atom³
SignedInteger„„„„„³ TcpRemote´³rec´³lit³
|
||||
tcp-remote„´³tupleµ´³named³host´³atom³String„„´³named³port´³atom³
SignedInteger„„„„„³TcpPeerInfo´³rec´³lit³tcp-peer„´³tupleµ´³named³handle´³embedded³any„„´³named³local´³refµ„³TcpLocal„„´³named³remote´³refµ„³ TcpRemote„„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³timer„´³schema·³version‘³definitions·³SetTimer´³rec´³lit³ set-timer„´³tupleµ´³named³label³any„´³named³msecs´³atom³Double„„´³named³kind´³refµ„³ TimerKind„„„„„³ LaterThan´³rec´³lit³
|
||||
later-than„´³tupleµ´³named³msecs´³atom³Double„„„„„³ TimerKind´³orµµ±relative´³lit³relative„„µ±absolute´³lit³absolute„„µ±clear´³lit³clear„„„„³TimerExpired´³rec´³lit³
timer-expired„´³tupleµ´³named³label³any„´³named³msecs´³atom³Double„„„„„„³embeddedType€„„µ³stream„´³schema·³version‘³definitions·³Mode´³orµµ±bytes´³lit³bytes„„µ±lines´³refµ„³LineMode„„µ±packet´³rec´³lit³packet„´³tupleµ´³named³size´³atom³
SignedInteger„„„„„„µ±object´³rec´³lit³object„´³tupleµ´³named³description³any„„„„„„„³Sink´³orµµ±source´³rec´³lit³source„´³tupleµ´³named³
|
||||
controller´³embedded´³refµ„³Source„„„„„„„µ±StreamError´³refµ„³StreamError„„µ±data´³rec´³lit³data„´³tupleµ´³named³payload³any„´³named³mode´³refµ„³Mode„„„„„„µ±eof´³rec´³lit³eof„´³tupleµ„„„„„„³Source´³orµµ±sink´³rec´³lit³sink„´³tupleµ´³named³
|
||||
controller´³embedded´³refµ„³Sink„„„„„„„µ±StreamError´³refµ„³StreamError„„µ±credit´³rec´³lit³credit„´³tupleµ´³named³amount´³refµ„³CreditAmount„„´³named³mode´³refµ„³Mode„„„„„„„„³LineMode´³orµµ±lf´³lit³lf„„µ±crlf´³lit³crlf„„„„³StreamError´³rec´³lit³error„´³tupleµ´³named³message´³atom³String„„„„„³CreditAmount´³orµµ±count´³atom³
SignedInteger„„µ± unbounded´³lit³ unbounded„„„„³StreamConnection´³rec´³lit³stream-connection„´³tupleµ´³named³source´³embedded´³refµ„³Source„„„´³named³sink´³embedded´³refµ„³Sink„„„´³named³spec³any„„„„³StreamListenerError´³rec´³lit³stream-listener-error„´³tupleµ´³named³spec³any„´³named³message´³atom³String„„„„„³StreamListenerReady´³rec´³lit³stream-listener-ready„´³tupleµ´³named³spec³any„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³sturdy„´³schema·³version‘³definitions·³Lit´³rec´³lit³lit„´³tupleµ´³named³value³any„„„„³Oid´³atom³
SignedInteger„³Alts´³rec´³lit³or„´³tupleµ´³named³alternatives´³seqof´³refµ„³Rewrite„„„„„„³CArr´³rec´³lit³arr„´³tupleµ´³named³arity´³atom³
SignedInteger„„„„„³CRec´³rec´³lit³rec„´³tupleµ´³named³label³any„´³named³arity´³atom³
SignedInteger„„„„„³PAnd´³rec´³lit³and„´³tupleµ´³named³patterns´³seqof´³refµ„³Pattern„„„„„„³PNot´³rec´³lit³not„´³tupleµ´³named³pattern´³refµ„³Pattern„„„„„³TRef´³rec´³lit³ref„´³tupleµ´³named³binding´³atom³
SignedInteger„„„„„³CDict´³rec´³lit³dict„´³tupleµ„„„³PAtom´³orµµ±Boolean´³lit³Boolean„„µ±Float´³lit³Float„„µ±Double´³lit³Double„„µ±
SignedInteger´³lit³
SignedInteger„„µ±String´³lit³String„„µ±
|
||||
ByteString´³lit³
|
||||
ByteString„„µ±Symbol´³lit³Symbol„„„„³PBind´³rec´³lit³bind„´³tupleµ´³named³pattern´³refµ„³Pattern„„„„„³Caveat´³orµµ±Rewrite´³refµ„³Rewrite„„µ±Alts´³refµ„³Alts„„„„³Pattern´³orµµ±PDiscard´³refµ„³PDiscard„„µ±PAtom´³refµ„³PAtom„„µ± PEmbedded´³refµ„³ PEmbedded„„µ±PBind´³refµ„³PBind„„µ±PAnd´³refµ„³PAnd„„µ±PNot´³refµ„³PNot„„µ±Lit´³refµ„³Lit„„µ± PCompound´³refµ„³ PCompound„„„„³Rewrite´³rec´³lit³rewrite„´³tupleµ´³named³pattern´³refµ„³Pattern„„´³named³template´³refµ„³Template„„„„„³WireRef´³orµµ±mine´³tupleµ´³lit<69>„´³named³oid´³refµ„³Oid„„„„„µ±yours´³tuplePrefixµ´³lit‘„´³named³oid´³refµ„³Oid„„„´³named³attenuation´³seqof´³refµ„³Caveat„„„„„„„³PDiscard´³rec´³lit³_„´³tupleµ„„„³Template´³orµµ±
|
||||
TAttenuate´³refµ„³
|
||||
TAttenuate„„µ±TRef´³refµ„³TRef„„µ±Lit´³refµ„³Lit„„µ± TCompound´³refµ„³ TCompound„„„„³ PCompound´³rec´³lit³compound„´³tupleµ´³named³ctor´³refµ„³ConstructorSpec„„´³named³members´³refµ„³PCompoundMembers„„„„„³ PEmbedded´³lit³Embedded„³ SturdyRef´³rec´³lit³ref„´³tupleµ´³named³oid³any„´³named³caveatChain´³seqof´³refµ„³Attenuation„„„´³named³sig´³atom³
|
||||
ByteString„„„„„³ TCompound´³rec´³lit³compound„´³tupleµ´³named³ctor´³refµ„³ConstructorSpec„„´³named³members´³refµ„³TCompoundMembers„„„„„³
|
||||
TAttenuate´³rec´³lit³ attenuate„´³tupleµ´³named³template´³refµ„³Template„„´³named³attenuation´³refµ„³Attenuation„„„„„³Attenuation´³seqof´³refµ„³Caveat„„³ConstructorSpec´³orµµ±CRec´³refµ„³CRec„„µ±CArr´³refµ„³CArr„„µ±CDict´³refµ„³CDict„„„„³PCompoundMembers´³dictof³any´³refµ„³Pattern„„³TCompoundMembers´³dictof³any´³refµ„³Template„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³worker„´³schema·³version‘³definitions·³Instance´³rec´³lit³Instance„´³tupleµ´³named³name´³atom³String„„´³named³argument³any„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³service„´³schema·³version‘³definitions·³RequireService´³rec´³lit³require-service„´³tupleµ´³named³serviceName³any„„„„³ServiceRunning´³rec´³lit³service-running„´³tupleµ´³named³serviceName³any„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³ dataspace„´³schema·³version‘³definitions·³Observe´³rec´³lit³Observe„´³tupleµ´³named³pattern´³refµ³dataspacePatterns„³Pattern„„´³named³observer´³embedded³any„„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³
|
||||
gatekeeper„´³schema·³version‘³definitions·³Bind´³rec´³lit³bind„´³tupleµ´³named³oid³any„´³named³key´³atom³
|
||||
ByteString„„´³named³target´³embedded³any„„„„„³Resolve´³rec´³lit³resolve„´³tupleµ´³named³ sturdyref´³refµ³sturdy„³ SturdyRef„„´³named³observer´³embedded´³embedded³any„„„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³racketEvent„´³schema·³version‘³definitions·³RacketEvent´³rec´³lit³racket-event„´³tupleµ´³named³source´³embedded³any„„´³named³event´³embedded³any„„„„„„³embeddedType€„„µ³genericProtocol„´³schema·³version‘³definitions·³Oid´³atom³
SignedInteger„³Sync´³rec´³lit³sync„´³tupleµ´³named³peer´³embedded´³lit<69>„„„„„„³Turn´³seqof´³refµ„³ TurnEvent„„³Error´³rec´³lit³error„´³tupleµ´³named³message´³atom³String„„´³named³detail³any„„„„³Event´³orµµ±Assert´³refµ„³Assert„„µ±Retract´³refµ„³Retract„„µ±Message´³refµ„³Message„„µ±Sync´³refµ„³Sync„„„„³Assert´³rec´³lit³assert„´³tupleµ´³named³ assertion´³refµ„³ Assertion„„´³named³handle´³refµ„³Handle„„„„„³Handle´³atom³
SignedInteger„³Packet´³orµµ±Turn´³refµ„³Turn„„µ±Error´³refµ„³Error„„„„³Message´³rec´³lit³message„´³tupleµ´³named³body´³refµ„³ Assertion„„„„„³Retract´³rec´³lit³retract„´³tupleµ´³named³handle´³refµ„³Handle„„„„„³ Assertion³any³ TurnEvent´³tupleµ´³named³oid´³refµ„³Oid„„´³named³event´³refµ„³Event„„„„„³embeddedType€„„µ³externalProtocol„´³schema·³version‘³definitions·³Oid´³atom³
SignedInteger„³Sync´³rec´³lit³sync„´³tupleµ´³named³peer´³embedded´³lit<69>„„„„„„³Turn´³seqof´³refµ„³ TurnEvent„„³Error´³rec´³lit³error„´³tupleµ´³named³message´³atom³String„„´³named³detail³any„„„„³Event´³orµµ±Assert´³refµ„³Assert„„µ±Retract´³refµ„³Retract„„µ±Message´³refµ„³Message„„µ±Sync´³refµ„³Sync„„„„³Assert´³rec´³lit³assert„´³tupleµ´³named³ assertion´³refµ„³ Assertion„„´³named³handle´³refµ„³Handle„„„„„³Handle´³atom³
SignedInteger„³Packet´³orµµ±Turn´³refµ„³Turn„„µ±Error´³refµ„³Error„„„„³Message´³rec´³lit³message„´³tupleµ´³named³body´³refµ„³ Assertion„„„„„³Retract´³rec´³lit³retract„´³tupleµ´³named³handle´³refµ„³Handle„„„„„³ Assertion³any³ TurnEvent´³tupleµ´³named³oid´³refµ„³Oid„„´³named³event´³refµ„³Event„„„„„³embeddedType´³refµ³sturdy„³WireRef„„„µ³internalProtocol„´³schema·³version‘³definitions·³Oid´³atom³
SignedInteger„³Sync´³rec´³lit³sync„´³tupleµ´³named³peer´³embedded´³lit<69>„„„„„„³Turn´³seqof´³refµ„³ TurnEvent„„³Error´³rec´³lit³error„´³tupleµ´³named³message´³atom³String„„´³named³detail³any„„„„³Event´³orµµ±Assert´³refµ„³Assert„„µ±Retract´³refµ„³Retract„„µ±Message´³refµ„³Message„„µ±Sync´³refµ„³Sync„„„„³Assert´³rec´³lit³assert„´³tupleµ´³named³ assertion´³refµ„³ Assertion„„´³named³handle´³refµ„³Handle„„„„„³Handle´³atom³
SignedInteger„³Packet´³orµµ±Turn´³refµ„³Turn„„µ±Error´³refµ„³Error„„„„³Message´³rec´³lit³message„´³tupleµ´³named³body´³refµ„³ Assertion„„„„„³Retract´³rec´³lit³retract„´³tupleµ´³named³handle´³refµ„³Handle„„„„„³ Assertion³any³ TurnEvent´³tupleµ´³named³oid´³refµ„³Oid„„´³named³event´³refµ„³Event„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³transportAddress„´³schema·³version‘³definitions·³Tcp´³rec´³lit³tcp„´³tupleµ´³named³host´³atom³String„„´³named³port´³atom³
SignedInteger„„„„„³Unix´³rec´³lit³unix„´³tupleµ´³named³path´³atom³String„„„„„³Stdio´³rec´³lit³stdio„´³tupleµ„„„³ WebSocket´³rec´³lit³ws„´³tupleµ´³named³url´³atom³String„„„„„„³embeddedType€„„µ³dataspacePatterns„´³schema·³version‘³definitions·³CArr´³rec´³lit³arr„´³tupleµ´³named³arity´³atom³
SignedInteger„„„„„³CRec´³rec´³lit³rec„´³tupleµ´³named³label³any„´³named³arity´³atom³
SignedInteger„„„„„³DLit´³rec´³lit³lit„´³tupleµ´³named³value³any„„„„³CDict´³rec´³lit³dict„´³tupleµ„„„³DBind´³rec´³lit³bind„´³tupleµ´³named³pattern´³refµ„³Pattern„„„„„³Pattern´³orµµ±DDiscard´³refµ„³DDiscard„„µ±DBind´³refµ„³DBind„„µ±DLit´³refµ„³DLit„„µ± DCompound´³refµ„³ DCompound„„„„³DDiscard´³rec´³lit³_„´³tupleµ„„„³ DCompound´³orµµ±rec´³rec´³lit³compound„´³tupleµ´³named³ctor´³refµ„³CRec„„´³named³members´³dictof´³atom³
SignedInteger„´³refµ„³Pattern„„„„„„„µ±arr´³rec´³lit³compound„´³tupleµ´³named³ctor´³refµ„³CArr„„´³named³members´³dictof´³atom³
SignedInteger„´³refµ„³Pattern„„„„„„„µ±dict´³rec´³lit³compound„´³tupleµ´³named³ctor´³refµ„³CDict„„´³named³members´³dictof³any´³refµ„³Pattern„„„„„„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³secureChatProtocol„´³schema·³version‘³definitions·³Join´³rec´³lit³
|
||||
joinedUser„´³tupleµ´³named³uid´³refµ„³UserId„„´³named³handle´³embedded´³refµ„³Session„„„„„„³Says´³rec´³lit³says„´³tupleµ´³named³who´³refµ„³UserId„„´³named³what´³atom³String„„„„„³UserId´³atom³
SignedInteger„³Session´³orµµ±observeUsers´³rec´³lit³Observe„´³tupleµ´³lit³user„´³named³observer´³embedded´³refµ„³UserInfo„„„„„„„µ±
observeSpeech´³rec´³lit³Observe„´³tupleµ´³lit³says„´³named³observer´³embedded´³refµ„³Says„„„„„„„µ± NickClaim´³refµ„³ NickClaim„„µ±Says´³refµ„³Says„„„„³UserInfo´³rec´³lit³user„´³tupleµ´³named³uid´³refµ„³UserId„„´³named³name´³atom³String„„„„„³ NickClaim´³rec´³lit³ claimNick„´³tupleµ´³named³uid´³refµ„³UserId„„´³named³name´³atom³String„„´³named³k´³embedded´³refµ„³NickClaimResponse„„„„„„³NickConflict´³rec´³lit³nickConflict„´³tupleµ„„„³NickClaimResponse´³orµµ±true´³lit<69>„„µ±NickConflict´³refµ„³NickConflict„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„µ³simpleChatProtocol„´³schema·³version‘³definitions·³Says´³rec´³lit³Says„´³tupleµ´³named³who´³atom³String„„´³named³what´³atom³String„„„„„³Present´³rec´³lit³Present„´³tupleµ´³named³username´³atom³String„„„„„„³embeddedType´³refµ³ EntityRef„³Cap„„„„„
|
|
@ -1,2 +0,0 @@
|
|||
type Cap* = object
|
||||
discard
|
|
@ -1,5 +0,0 @@
|
|||
PRESERVES_SRC_DIR = ../../../../preserves/src
|
||||
|
||||
: $(PRESERVES_SRC_DIR)/preserves/private/preserves_schema_nim.nim |> nim c -o:%o %f |> preserves_schema_nim
|
||||
|
||||
: foreach *.prs | preserves_schema_nim |> ./preserves_schema_nim %f; nim check --path:$(PRESERVES_SRC_DIR) %o |> %B.nim {gen}
|
|
@ -1,4 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
Observe = <Observe @pattern dataspacePatterns.Pattern @observer #!any>.
|
|
@ -1,16 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
; Dataspace patterns: a sublanguage of attenuation patterns.
|
||||
Pattern = DDiscard / DBind / DLit / DCompound .
|
||||
|
||||
DDiscard = <_>.
|
||||
DBind = <bind @pattern Pattern>.
|
||||
DLit = <lit @value any>.
|
||||
DCompound = @rec <compound @ctor CRec @members { int: Pattern ...:... }>
|
||||
/ @arr <compound @ctor CArr @members { int: Pattern ...:... }>
|
||||
/ @dict <compound @ctor CDict @members { any: Pattern ...:... }> .
|
||||
|
||||
CRec = <rec @label any @arity int>.
|
||||
CArr = <arr @arity int>.
|
||||
CDict = <dict>.
|
|
@ -1,3 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType sturdy.WireRef .
|
||||
include "genericProtocol.prs".
|
|
@ -1,5 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
Resolve = <resolve @sturdyref sturdy.SturdyRef @observer #!#!any>.
|
||||
Bind = <bind @oid any @key bytes @target #!any>.
|
|
@ -1,17 +0,0 @@
|
|||
version 1 .
|
||||
|
||||
Packet = Turn / Error .
|
||||
|
||||
Error = <error @message string @detail any>.
|
||||
|
||||
Assertion = any .
|
||||
Handle = int .
|
||||
Event = Assert / Retract / Message / Sync .
|
||||
Oid = int .
|
||||
Turn = [TurnEvent ...].
|
||||
TurnEvent = [@oid Oid @event Event].
|
||||
|
||||
Assert = <assert @assertion Assertion @handle Handle>.
|
||||
Retract = <retract @handle Handle>.
|
||||
Message = <message @body Assertion>.
|
||||
Sync = <sync @peer #!#t>.
|
|
@ -1,3 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
include "genericProtocol.prs".
|
|
@ -1,3 +0,0 @@
|
|||
version 1 .
|
||||
|
||||
RacketEvent = <racket-event @source #!any @event #!any>.
|
|
@ -1,21 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
UserId = int .
|
||||
|
||||
Join = <joinedUser @uid UserId @handle #!Session>.
|
||||
|
||||
Session = @observeUsers <Observe =user @observer #!UserInfo>
|
||||
/ @observeSpeech <Observe =says @observer #!Says>
|
||||
/ NickClaim
|
||||
/ Says
|
||||
.
|
||||
|
||||
NickClaim = <claimNick @uid UserId @name string @k #!NickClaimResponse>.
|
||||
NickClaimResponse = #t / NickConflict .
|
||||
|
||||
UserInfo = <user @uid UserId @name string>.
|
||||
|
||||
Says = <says @who UserId @what string>.
|
||||
|
||||
NickConflict = <nickConflict>.
|
|
@ -1,5 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
RequireService = <require-service @serviceName any>.
|
||||
ServiceRunning = <service-running @serviceName any>.
|
|
@ -1,5 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
Present = <Present @username string>.
|
||||
Says = <Says @who string @what string>.
|
|
@ -1,38 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
; Assertion:
|
||||
StreamConnection = <stream-connection @source #!Source @sink #!Sink @spec any>.
|
||||
|
||||
; Assertions:
|
||||
StreamListenerReady = <stream-listener-ready @spec any>.
|
||||
StreamListenerError = <stream-listener-error @spec any @message string>.
|
||||
|
||||
; Assertion:
|
||||
StreamError = <error @message string>.
|
||||
|
||||
Source =
|
||||
; Assertions:
|
||||
/ <sink @controller #!Sink>
|
||||
/ StreamError
|
||||
|
||||
; Messages:
|
||||
/ <credit @amount CreditAmount @mode Mode>
|
||||
.
|
||||
|
||||
Sink =
|
||||
; Assertions:
|
||||
/ <source @controller #!Source>
|
||||
/ StreamError
|
||||
|
||||
; Messages:
|
||||
/ <data @payload any @mode Mode>
|
||||
/ <eof>
|
||||
.
|
||||
|
||||
; Value:
|
||||
CreditAmount = @count int / @unbounded =unbounded .
|
||||
|
||||
; Value:
|
||||
Mode = =bytes / @lines LineMode / <packet @size int> / <object @description any>.
|
||||
LineMode = =lf / =crlf .
|
|
@ -1,44 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
; Each Attenuation is a stage. The sequence of Attenuations is run RIGHT-TO-LEFT.
|
||||
; That is, the newest Attenuations are at the right.
|
||||
SturdyRef = <ref @oid any @caveatChain [Attenuation ...] @sig bytes>.
|
||||
|
||||
; An individual Attenuation is run RIGHT-TO-LEFT.
|
||||
; That is, the newest Caveats are at the right.
|
||||
Attenuation = [Caveat ...].
|
||||
|
||||
; embodies 1st-party caveats over assertion structure, but nothing else
|
||||
; can add 3rd-party caveats and richer predicates later
|
||||
Caveat = Rewrite / Alts .
|
||||
Rewrite = <rewrite @pattern Pattern @template Template>.
|
||||
Alts = <or @alternatives [Rewrite ...]>.
|
||||
|
||||
Oid = int .
|
||||
WireRef = @mine [0 @oid Oid] / @yours [1 @oid Oid @attenuation Caveat ...].
|
||||
|
||||
;---------------------------------------------------------------------------
|
||||
|
||||
ConstructorSpec = CRec / CArr / CDict .
|
||||
CRec = <rec @label any @arity int>.
|
||||
CArr = <arr @arity int>.
|
||||
CDict = <dict>.
|
||||
|
||||
Lit = <lit @value any>.
|
||||
|
||||
Pattern = PDiscard / PAtom / PEmbedded / PBind / PAnd / PNot / Lit / PCompound .
|
||||
PDiscard = <_>.
|
||||
PAtom = =Boolean / =Float / =Double / =SignedInteger / =String / =ByteString / =Symbol .
|
||||
PEmbedded = =Embedded .
|
||||
PBind = <bind @pattern Pattern>.
|
||||
PAnd = <and @patterns [Pattern ...]>.
|
||||
PNot = <not @pattern Pattern>.
|
||||
PCompound = <compound @ctor ConstructorSpec @members PCompoundMembers>.
|
||||
PCompoundMembers = { any: Pattern ...:... }.
|
||||
|
||||
Template = TAttenuate / TRef / Lit / TCompound .
|
||||
TAttenuate = <attenuate @template Template @attenuation Attenuation>.
|
||||
TRef = <ref @binding int>.
|
||||
TCompound = <compound @ctor ConstructorSpec @members TCompoundMembers>.
|
||||
TCompoundMembers = { any: Template ...:... }.
|
|
@ -1,7 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
TcpRemote = <tcp-remote @host string @port int>.
|
||||
TcpLocal = <tcp-local @host string @port int>.
|
||||
|
||||
TcpPeerInfo = <tcp-peer @handle #!any @local TcpLocal @remote TcpRemote>.
|
|
@ -1,7 +0,0 @@
|
|||
version 1 .
|
||||
|
||||
SetTimer = <set-timer @label any @msecs double @kind TimerKind>.
|
||||
TimerExpired = <timer-expired @label any @msecs double>.
|
||||
TimerKind = =relative / =absolute / =clear .
|
||||
|
||||
LaterThan = <later-than @msecs double>.
|
|
@ -1,6 +0,0 @@
|
|||
version 1 .
|
||||
|
||||
Tcp = <tcp @host string @port int>.
|
||||
Unix = <unix @path string>.
|
||||
WebSocket = <ws @url string>.
|
||||
Stdio = <stdio>.
|
|
@ -1,4 +0,0 @@
|
|||
version 1 .
|
||||
embeddedType EntityRef.Cap .
|
||||
|
||||
Instance = <Instance @name string @argument any>.
|
|
@ -0,0 +1,57 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
StateKind* {.pure.} = enum
|
||||
`started`, `ready`, `failed`, `complete`, `userDefined`
|
||||
`State`* {.preservesOr.} = object
|
||||
case orKind*: StateKind
|
||||
of StateKind.`started`:
|
||||
`started`* {.preservesLiteral: "started".}: bool
|
||||
|
||||
of StateKind.`ready`:
|
||||
`ready`* {.preservesLiteral: "ready".}: bool
|
||||
|
||||
of StateKind.`failed`:
|
||||
`failed`* {.preservesLiteral: "failed".}: bool
|
||||
|
||||
of StateKind.`complete`:
|
||||
`complete`* {.preservesLiteral: "complete".}: bool
|
||||
|
||||
of StateKind.`userDefined`:
|
||||
`userdefined`*: Value
|
||||
|
||||
|
||||
ServiceObject* {.preservesRecord: "service-object".} = object
|
||||
`serviceName`*: Value
|
||||
`object`*: Value
|
||||
|
||||
RequireService* {.preservesRecord: "require-service".} = object
|
||||
`serviceName`*: Value
|
||||
|
||||
RestartService* {.preservesRecord: "restart-service".} = object
|
||||
`serviceName`*: Value
|
||||
|
||||
RunService* {.preservesRecord: "run-service".} = object
|
||||
`serviceName`*: Value
|
||||
|
||||
ServiceState* {.preservesRecord: "service-state".} = object
|
||||
`serviceName`*: Value
|
||||
`state`*: State
|
||||
|
||||
ServiceDependency* {.preservesRecord: "depends-on".} = object
|
||||
`depender`*: Value
|
||||
`dependee`*: ServiceState
|
||||
|
||||
proc `$`*(x: State | ServiceObject | RequireService | RestartService |
|
||||
RunService |
|
||||
ServiceState |
|
||||
ServiceDependency): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: State | ServiceObject | RequireService | RestartService |
|
||||
RunService |
|
||||
ServiceState |
|
||||
ServiceDependency): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,40 @@
|
|||
|
||||
import
|
||||
preserves, sturdy, gatekeeper
|
||||
|
||||
type
|
||||
StandardTransportKind* {.pure.} = enum
|
||||
`wsUrl`, `other`
|
||||
`StandardTransport`* {.preservesOr.} = object
|
||||
case orKind*: StandardTransportKind
|
||||
of StandardTransportKind.`wsUrl`:
|
||||
`wsurl`*: string
|
||||
|
||||
of StandardTransportKind.`other`:
|
||||
`other`*: Value
|
||||
|
||||
|
||||
StandardRouteKind* {.pure.} = enum
|
||||
`standard`, `general`
|
||||
StandardRouteStandard* {.preservesTuple.} = object
|
||||
`transports`*: seq[StandardTransport]
|
||||
`key`*: seq[byte]
|
||||
`service`*: Value
|
||||
`sig`*: seq[byte]
|
||||
`oid`*: Value
|
||||
`caveats`* {.preservesTupleTail.}: seq[sturdy.Caveat]
|
||||
|
||||
`StandardRoute`* {.preservesOr.} = object
|
||||
case orKind*: StandardRouteKind
|
||||
of StandardRouteKind.`standard`:
|
||||
`standard`*: StandardRouteStandard
|
||||
|
||||
of StandardRouteKind.`general`:
|
||||
`general`*: gatekeeper.Route
|
||||
|
||||
|
||||
proc `$`*(x: StandardTransport | StandardRoute): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: StandardTransport | StandardRoute): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,117 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
CreditAmountKind* {.pure.} = enum
|
||||
`count`, `unbounded`
|
||||
`CreditAmount`* {.preservesOr.} = object
|
||||
case orKind*: CreditAmountKind
|
||||
of CreditAmountKind.`count`:
|
||||
`count`*: BiggestInt
|
||||
|
||||
of CreditAmountKind.`unbounded`:
|
||||
`unbounded`* {.preservesLiteral: "unbounded".}: bool
|
||||
|
||||
|
||||
StreamError* {.preservesRecord: "error".} = object
|
||||
`message`*: string
|
||||
|
||||
StreamListenerError* {.preservesRecord: "stream-listener-error".} = object
|
||||
`spec`*: Value
|
||||
`message`*: string
|
||||
|
||||
StreamConnection* {.preservesRecord: "stream-connection".} = object
|
||||
`source`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`sink`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`spec`*: Value
|
||||
|
||||
`LineMode`* {.preservesOr, pure.} = enum
|
||||
`lf`, `crlf`
|
||||
SourceKind* {.pure.} = enum
|
||||
`sink`, `StreamError`, `credit`
|
||||
SourceSink* {.preservesRecord: "sink".} = object
|
||||
`controller`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
SourceCredit* {.preservesRecord: "credit".} = object
|
||||
`amount`*: CreditAmount
|
||||
`mode`*: Mode
|
||||
|
||||
`Source`* {.acyclic, preservesOr.} = ref object
|
||||
case orKind*: SourceKind
|
||||
of SourceKind.`sink`:
|
||||
`sink`* {.preservesEmbedded.}: SourceSink
|
||||
|
||||
of SourceKind.`StreamError`:
|
||||
`streamerror`*: StreamError
|
||||
|
||||
of SourceKind.`credit`:
|
||||
`credit`*: SourceCredit
|
||||
|
||||
|
||||
SinkKind* {.pure.} = enum
|
||||
`source`, `StreamError`, `data`, `eof`
|
||||
SinkSource* {.preservesRecord: "source".} = object
|
||||
`controller`* {.preservesEmbedded.}: EmbeddedRef
|
||||
|
||||
SinkData* {.preservesRecord: "data".} = object
|
||||
`payload`*: Value
|
||||
`mode`*: Mode
|
||||
|
||||
SinkEof* {.preservesRecord: "eof".} = object
|
||||
|
||||
`Sink`* {.acyclic, preservesOr.} = ref object
|
||||
case orKind*: SinkKind
|
||||
of SinkKind.`source`:
|
||||
`source`* {.preservesEmbedded.}: SinkSource
|
||||
|
||||
of SinkKind.`StreamError`:
|
||||
`streamerror`*: StreamError
|
||||
|
||||
of SinkKind.`data`:
|
||||
`data`*: SinkData
|
||||
|
||||
of SinkKind.`eof`:
|
||||
`eof`*: SinkEof
|
||||
|
||||
|
||||
StreamListenerReady* {.preservesRecord: "stream-listener-ready".} = object
|
||||
`spec`*: Value
|
||||
|
||||
ModeKind* {.pure.} = enum
|
||||
`bytes`, `lines`, `packet`, `object`
|
||||
ModePacket* {.preservesRecord: "packet".} = object
|
||||
`size`*: BiggestInt
|
||||
|
||||
ModeObject* {.preservesRecord: "object".} = object
|
||||
`description`*: Value
|
||||
|
||||
`Mode`* {.preservesOr.} = object
|
||||
case orKind*: ModeKind
|
||||
of ModeKind.`bytes`:
|
||||
`bytes`* {.preservesLiteral: "bytes".}: bool
|
||||
|
||||
of ModeKind.`lines`:
|
||||
`lines`*: LineMode
|
||||
|
||||
of ModeKind.`packet`:
|
||||
`packet`*: ModePacket
|
||||
|
||||
of ModeKind.`object`:
|
||||
`object`*: ModeObject
|
||||
|
||||
|
||||
proc `$`*(x: CreditAmount | StreamError | StreamListenerError | StreamConnection |
|
||||
Source |
|
||||
Sink |
|
||||
StreamListenerReady |
|
||||
Mode): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: CreditAmount | StreamError | StreamListenerError |
|
||||
StreamConnection |
|
||||
Source |
|
||||
Sink |
|
||||
StreamListenerReady |
|
||||
Mode): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,252 @@
|
|||
|
||||
import
|
||||
preserves, std/tables, std/options
|
||||
|
||||
type
|
||||
PCompoundKind* {.pure.} = enum
|
||||
`rec`, `arr`, `dict`
|
||||
PCompoundRec* {.preservesRecord: "rec".} = object
|
||||
`label`*: Value
|
||||
`fields`*: seq[Pattern]
|
||||
|
||||
PCompoundArr* {.preservesRecord: "arr".} = object
|
||||
`items`*: seq[Pattern]
|
||||
|
||||
PCompoundDict* {.preservesRecord: "dict".} = object
|
||||
`entries`*: Table[Value, Pattern]
|
||||
|
||||
`PCompound`* {.preservesOr.} = object
|
||||
case orKind*: PCompoundKind
|
||||
of PCompoundKind.`rec`:
|
||||
`rec`*: PCompoundRec
|
||||
|
||||
of PCompoundKind.`arr`:
|
||||
`arr`*: PCompoundArr
|
||||
|
||||
of PCompoundKind.`dict`:
|
||||
`dict`*: PCompoundDict
|
||||
|
||||
|
||||
Reject* {.preservesRecord: "reject".} = object
|
||||
`pattern`*: Pattern
|
||||
|
||||
CaveatsFieldKind* {.pure.} = enum
|
||||
`present`, `invalid`, `absent`
|
||||
CaveatsFieldPresent* {.preservesDictionary.} = object
|
||||
`caveats`*: seq[Caveat]
|
||||
|
||||
CaveatsFieldInvalid* {.preservesDictionary.} = object
|
||||
`caveats`*: Value
|
||||
|
||||
CaveatsFieldAbsent* {.preservesDictionary.} = object
|
||||
|
||||
`CaveatsField`* {.preservesOr.} = object
|
||||
case orKind*: CaveatsFieldKind
|
||||
of CaveatsFieldKind.`present`:
|
||||
`present`*: CaveatsFieldPresent
|
||||
|
||||
of CaveatsFieldKind.`invalid`:
|
||||
`invalid`*: CaveatsFieldInvalid
|
||||
|
||||
of CaveatsFieldKind.`absent`:
|
||||
`absent`*: CaveatsFieldAbsent
|
||||
|
||||
|
||||
SturdyDescriptionDetail* {.preservesDictionary.} = object
|
||||
`key`*: seq[byte]
|
||||
`oid`*: Value
|
||||
|
||||
PAnd* {.preservesRecord: "and".} = object
|
||||
`patterns`*: seq[Pattern]
|
||||
|
||||
SturdyStepDetail* = Parameters
|
||||
Rewrite* {.preservesRecord: "rewrite".} = object
|
||||
`pattern`*: Pattern
|
||||
`template`*: Template
|
||||
|
||||
ParametersCaveats* = Option[Value]
|
||||
ParametersOid* = Value
|
||||
ParametersSig* = seq[byte]
|
||||
`Parameters`* {.preservesDictionary.} = object
|
||||
`caveats`*: Option[Value]
|
||||
`oid`*: Value
|
||||
`sig`*: seq[byte]
|
||||
|
||||
TRef* {.preservesRecord: "ref".} = object
|
||||
`binding`*: BiggestInt
|
||||
|
||||
PBind* {.preservesRecord: "bind".} = object
|
||||
`pattern`*: Pattern
|
||||
|
||||
Lit* {.preservesRecord: "lit".} = object
|
||||
`value`*: Value
|
||||
|
||||
TCompoundKind* {.pure.} = enum
|
||||
`rec`, `arr`, `dict`
|
||||
TCompoundRec* {.preservesRecord: "rec".} = object
|
||||
`label`*: Value
|
||||
`fields`*: seq[Template]
|
||||
|
||||
TCompoundArr* {.preservesRecord: "arr".} = object
|
||||
`items`*: seq[Template]
|
||||
|
||||
TCompoundDict* {.preservesRecord: "dict".} = object
|
||||
`entries`*: Table[Value, Template]
|
||||
|
||||
`TCompound`* {.preservesOr.} = object
|
||||
case orKind*: TCompoundKind
|
||||
of TCompoundKind.`rec`:
|
||||
`rec`*: TCompoundRec
|
||||
|
||||
of TCompoundKind.`arr`:
|
||||
`arr`*: TCompoundArr
|
||||
|
||||
of TCompoundKind.`dict`:
|
||||
`dict`*: TCompoundDict
|
||||
|
||||
|
||||
SturdyPathStepDetail* = Parameters
|
||||
`PAtom`* {.preservesOr, pure.} = enum
|
||||
`Boolean`, `Double`, `SignedInteger`, `String`, `ByteString`, `Symbol`
|
||||
PDiscard* {.preservesRecord: "_".} = object
|
||||
|
||||
TemplateKind* {.pure.} = enum
|
||||
`TAttenuate`, `TRef`, `Lit`, `TCompound`
|
||||
`Template`* {.acyclic, preservesOr.} = ref object
|
||||
case orKind*: TemplateKind
|
||||
of TemplateKind.`TAttenuate`:
|
||||
`tattenuate`*: TAttenuate
|
||||
|
||||
of TemplateKind.`TRef`:
|
||||
`tref`*: TRef
|
||||
|
||||
of TemplateKind.`Lit`:
|
||||
`lit`*: Lit
|
||||
|
||||
of TemplateKind.`TCompound`:
|
||||
`tcompound`*: TCompound
|
||||
|
||||
|
||||
CaveatKind* {.pure.} = enum
|
||||
`Rewrite`, `Alts`, `Reject`, `unknown`
|
||||
`Caveat`* {.preservesOr.} = object
|
||||
case orKind*: CaveatKind
|
||||
of CaveatKind.`Rewrite`:
|
||||
`rewrite`*: Rewrite
|
||||
|
||||
of CaveatKind.`Alts`:
|
||||
`alts`*: Alts
|
||||
|
||||
of CaveatKind.`Reject`:
|
||||
`reject`*: Reject
|
||||
|
||||
of CaveatKind.`unknown`:
|
||||
`unknown`*: Value
|
||||
|
||||
|
||||
PNot* {.preservesRecord: "not".} = object
|
||||
`pattern`*: Pattern
|
||||
|
||||
SturdyRef* {.preservesRecord: "ref".} = object
|
||||
`parameters`*: Parameters
|
||||
|
||||
WireRefKind* {.pure.} = enum
|
||||
`mine`, `yours`
|
||||
WireRefMine* {.preservesTuple.} = object
|
||||
`field0`* {.preservesLiteral: "0".}: tuple[]
|
||||
`oid`*: Oid
|
||||
|
||||
WireRefYours* {.preservesTuple.} = object
|
||||
`field0`* {.preservesLiteral: "1".}: tuple[]
|
||||
`oid`*: Oid
|
||||
`attenuation`* {.preservesTupleTail.}: seq[Caveat]
|
||||
|
||||
`WireRef`* {.preservesOr.} = object
|
||||
case orKind*: WireRefKind
|
||||
of WireRefKind.`mine`:
|
||||
`mine`*: WireRefMine
|
||||
|
||||
of WireRefKind.`yours`:
|
||||
`yours`*: WireRefYours
|
||||
|
||||
|
||||
TAttenuate* {.preservesRecord: "attenuate".} = object
|
||||
`template`*: Template
|
||||
`attenuation`*: seq[Caveat]
|
||||
|
||||
Oid* = BiggestInt
|
||||
Alts* {.preservesRecord: "or".} = object
|
||||
`alternatives`*: seq[Rewrite]
|
||||
|
||||
PatternKind* {.pure.} = enum
|
||||
`PDiscard`, `PAtom`, `PEmbedded`, `PBind`, `PAnd`, `PNot`, `Lit`,
|
||||
`PCompound`
|
||||
`Pattern`* {.acyclic, preservesOr.} = ref object
|
||||
case orKind*: PatternKind
|
||||
of PatternKind.`PDiscard`:
|
||||
`pdiscard`*: PDiscard
|
||||
|
||||
of PatternKind.`PAtom`:
|
||||
`patom`*: PAtom
|
||||
|
||||
of PatternKind.`PEmbedded`:
|
||||
`pembedded`* {.preservesLiteral: "Embedded".}: bool
|
||||
|
||||
of PatternKind.`PBind`:
|
||||
`pbind`*: PBind
|
||||
|
||||
of PatternKind.`PAnd`:
|
||||
`pand`*: PAnd
|
||||
|
||||
of PatternKind.`PNot`:
|
||||
`pnot`*: PNot
|
||||
|
||||
of PatternKind.`Lit`:
|
||||
`lit`*: Lit
|
||||
|
||||
of PatternKind.`PCompound`:
|
||||
`pcompound`*: PCompound
|
||||
|
||||
|
||||
proc `$`*(x: PCompound | Reject | CaveatsField | SturdyDescriptionDetail | PAnd |
|
||||
SturdyStepDetail |
|
||||
Rewrite |
|
||||
Parameters |
|
||||
TRef |
|
||||
PBind |
|
||||
Lit |
|
||||
TCompound |
|
||||
SturdyPathStepDetail |
|
||||
PDiscard |
|
||||
Template |
|
||||
Caveat |
|
||||
PNot |
|
||||
SturdyRef |
|
||||
WireRef |
|
||||
TAttenuate |
|
||||
Oid |
|
||||
Alts |
|
||||
Pattern): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: PCompound | Reject | CaveatsField | SturdyDescriptionDetail |
|
||||
PAnd |
|
||||
SturdyStepDetail |
|
||||
Rewrite |
|
||||
Parameters |
|
||||
TRef |
|
||||
PBind |
|
||||
Lit |
|
||||
TCompound |
|
||||
SturdyPathStepDetail |
|
||||
PDiscard |
|
||||
Template |
|
||||
Caveat |
|
||||
PNot |
|
||||
SturdyRef |
|
||||
WireRef |
|
||||
TAttenuate |
|
||||
Oid |
|
||||
Alts |
|
||||
Pattern): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,23 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
TcpLocal* {.preservesRecord: "tcp-local".} = object
|
||||
`host`*: string
|
||||
`port`*: BiggestInt
|
||||
|
||||
TcpPeerInfo* {.preservesRecord: "tcp-peer".} = object
|
||||
`handle`* {.preservesEmbedded.}: EmbeddedRef
|
||||
`local`*: TcpLocal
|
||||
`remote`*: TcpRemote
|
||||
|
||||
TcpRemote* {.preservesRecord: "tcp-remote".} = object
|
||||
`host`*: string
|
||||
`port`*: BiggestInt
|
||||
|
||||
proc `$`*(x: TcpLocal | TcpPeerInfo | TcpRemote): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: TcpLocal | TcpPeerInfo | TcpRemote): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,24 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
TimerExpired* {.preservesRecord: "timer-expired".} = object
|
||||
`label`*: Value
|
||||
`seconds`*: float
|
||||
|
||||
SetTimer* {.preservesRecord: "set-timer".} = object
|
||||
`label`*: Value
|
||||
`seconds`*: float
|
||||
`kind`*: TimerKind
|
||||
|
||||
`TimerKind`* {.preservesOr, pure.} = enum
|
||||
`relative`, `absolute`, `clear`
|
||||
LaterThan* {.preservesRecord: "later-than".} = object
|
||||
`seconds`*: float
|
||||
|
||||
proc `$`*(x: TimerExpired | SetTimer | LaterThan): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: TimerExpired | SetTimer | LaterThan): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,263 @@
|
|||
|
||||
import
|
||||
preserves, protocol
|
||||
|
||||
type
|
||||
TargetedTurnEvent* {.preservesRecord: "event".} = object
|
||||
`target`*: Target
|
||||
`detail`*: TurnEvent
|
||||
|
||||
`LinkedTaskReleaseReason`* {.preservesOr, pure.} = enum
|
||||
`cancelled`, `normal`
|
||||
TurnId* = Value
|
||||
AssertionDescriptionKind* {.pure.} = enum
|
||||
`value`, `opaque`
|
||||
AssertionDescriptionValue* {.preservesRecord: "value".} = object
|
||||
`value`*: Value
|
||||
|
||||
AssertionDescriptionOpaque* {.preservesRecord: "opaque".} = object
|
||||
`description`*: Value
|
||||
|
||||
`AssertionDescription`* {.preservesOr.} = object
|
||||
case orKind*: AssertionDescriptionKind
|
||||
of AssertionDescriptionKind.`value`:
|
||||
`value`*: AssertionDescriptionValue
|
||||
|
||||
of AssertionDescriptionKind.`opaque`:
|
||||
`opaque`*: AssertionDescriptionOpaque
|
||||
|
||||
|
||||
NameKind* {.pure.} = enum
|
||||
`anonymous`, `named`
|
||||
NameAnonymous* {.preservesRecord: "anonymous".} = object
|
||||
|
||||
NameNamed* {.preservesRecord: "named".} = object
|
||||
`name`*: Value
|
||||
|
||||
`Name`* {.preservesOr.} = object
|
||||
case orKind*: NameKind
|
||||
of NameKind.`anonymous`:
|
||||
`anonymous`*: NameAnonymous
|
||||
|
||||
of NameKind.`named`:
|
||||
`named`*: NameNamed
|
||||
|
||||
|
||||
ActorId* = Value
|
||||
FacetId* = Value
|
||||
`FacetStopReason`* {.preservesOr, pure.} = enum
|
||||
`explicitAction`, `inert`, `parentStopping`, `actorStopping`
|
||||
TaskId* = Value
|
||||
ActorActivationKind* {.pure.} = enum
|
||||
`start`, `turn`, `stop`
|
||||
ActorActivationStart* {.preservesRecord: "start".} = object
|
||||
`actorName`*: Name
|
||||
|
||||
ActorActivationStop* {.preservesRecord: "stop".} = object
|
||||
`status`*: ExitStatus
|
||||
|
||||
`ActorActivation`* {.preservesOr.} = object
|
||||
case orKind*: ActorActivationKind
|
||||
of ActorActivationKind.`start`:
|
||||
`start`*: ActorActivationStart
|
||||
|
||||
of ActorActivationKind.`turn`:
|
||||
`turn`*: TurnDescription
|
||||
|
||||
of ActorActivationKind.`stop`:
|
||||
`stop`*: ActorActivationStop
|
||||
|
||||
|
||||
Target* {.preservesRecord: "entity".} = object
|
||||
`actor`*: ActorId
|
||||
`facet`*: FacetId
|
||||
`oid`*: Oid
|
||||
|
||||
TurnCauseKind* {.pure.} = enum
|
||||
`turn`, `cleanup`, `linkedTaskRelease`, `periodicActivation`, `delay`,
|
||||
`external`
|
||||
TurnCauseTurn* {.preservesRecord: "caused-by".} = object
|
||||
`id`*: TurnId
|
||||
|
||||
TurnCauseCleanup* {.preservesRecord: "cleanup".} = object
|
||||
|
||||
TurnCauseLinkedTaskRelease* {.preservesRecord: "linked-task-release".} = object
|
||||
`id`*: TaskId
|
||||
`reason`*: LinkedTaskReleaseReason
|
||||
|
||||
TurnCausePeriodicActivation* {.preservesRecord: "periodic-activation".} = object
|
||||
`period`*: float
|
||||
|
||||
TurnCauseDelay* {.preservesRecord: "delay".} = object
|
||||
`causingTurn`*: TurnId
|
||||
`amount`*: float
|
||||
|
||||
TurnCauseExternal* {.preservesRecord: "external".} = object
|
||||
`description`*: Value
|
||||
|
||||
`TurnCause`* {.preservesOr.} = object
|
||||
case orKind*: TurnCauseKind
|
||||
of TurnCauseKind.`turn`:
|
||||
`turn`*: TurnCauseTurn
|
||||
|
||||
of TurnCauseKind.`cleanup`:
|
||||
`cleanup`*: TurnCauseCleanup
|
||||
|
||||
of TurnCauseKind.`linkedTaskRelease`:
|
||||
`linkedtaskrelease`*: TurnCauseLinkedTaskRelease
|
||||
|
||||
of TurnCauseKind.`periodicActivation`:
|
||||
`periodicactivation`*: TurnCausePeriodicActivation
|
||||
|
||||
of TurnCauseKind.`delay`:
|
||||
`delay`*: TurnCauseDelay
|
||||
|
||||
of TurnCauseKind.`external`:
|
||||
`external`*: TurnCauseExternal
|
||||
|
||||
|
||||
TurnEventKind* {.pure.} = enum
|
||||
`assert`, `retract`, `message`, `sync`, `breakLink`
|
||||
TurnEventAssert* {.preservesRecord: "assert".} = object
|
||||
`assertion`*: AssertionDescription
|
||||
`handle`*: protocol.Handle
|
||||
|
||||
TurnEventRetract* {.preservesRecord: "retract".} = object
|
||||
`handle`*: protocol.Handle
|
||||
|
||||
TurnEventMessage* {.preservesRecord: "message".} = object
|
||||
`body`*: AssertionDescription
|
||||
|
||||
TurnEventSync* {.preservesRecord: "sync".} = object
|
||||
`peer`*: Target
|
||||
|
||||
TurnEventBreakLink* {.preservesRecord: "break-link".} = object
|
||||
`source`*: ActorId
|
||||
`handle`*: protocol.Handle
|
||||
|
||||
`TurnEvent`* {.preservesOr.} = object
|
||||
case orKind*: TurnEventKind
|
||||
of TurnEventKind.`assert`:
|
||||
`assert`*: TurnEventAssert
|
||||
|
||||
of TurnEventKind.`retract`:
|
||||
`retract`*: TurnEventRetract
|
||||
|
||||
of TurnEventKind.`message`:
|
||||
`message`*: TurnEventMessage
|
||||
|
||||
of TurnEventKind.`sync`:
|
||||
`sync`*: TurnEventSync
|
||||
|
||||
of TurnEventKind.`breakLink`:
|
||||
`breaklink`*: TurnEventBreakLink
|
||||
|
||||
|
||||
TurnDescription* {.preservesRecord: "turn".} = object
|
||||
`id`*: TurnId
|
||||
`cause`*: TurnCause
|
||||
`actions`*: seq[ActionDescription]
|
||||
|
||||
ExitStatusKind* {.pure.} = enum
|
||||
`ok`, `Error`
|
||||
`ExitStatus`* {.preservesOr.} = object
|
||||
case orKind*: ExitStatusKind
|
||||
of ExitStatusKind.`ok`:
|
||||
`ok`* {.preservesLiteral: "ok".}: bool
|
||||
|
||||
of ExitStatusKind.`Error`:
|
||||
`error`*: protocol.Error
|
||||
|
||||
|
||||
TraceEntry* {.preservesRecord: "trace".} = object
|
||||
`timestamp`*: float
|
||||
`actor`*: ActorId
|
||||
`item`*: ActorActivation
|
||||
|
||||
Oid* = Value
|
||||
ActionDescriptionKind* {.pure.} = enum
|
||||
`dequeue`, `enqueue`, `dequeueInternal`, `enqueueInternal`, `spawn`, `link`,
|
||||
`facetStart`, `facetStop`, `linkedTaskStart`
|
||||
ActionDescriptionDequeue* {.preservesRecord: "dequeue".} = object
|
||||
`event`*: TargetedTurnEvent
|
||||
|
||||
ActionDescriptionEnqueue* {.preservesRecord: "enqueue".} = object
|
||||
`event`*: TargetedTurnEvent
|
||||
|
||||
ActionDescriptionDequeueInternal* {.preservesRecord: "dequeue-internal".} = object
|
||||
`event`*: TargetedTurnEvent
|
||||
|
||||
ActionDescriptionEnqueueInternal* {.preservesRecord: "enqueue-internal".} = object
|
||||
`event`*: TargetedTurnEvent
|
||||
|
||||
ActionDescriptionSpawn* {.preservesRecord: "spawn".} = object
|
||||
`link`*: bool
|
||||
`id`*: ActorId
|
||||
|
||||
ActionDescriptionLink* {.preservesRecord: "link".} = object
|
||||
`parentActor`*: ActorId
|
||||
`childToParent`*: protocol.Handle
|
||||
`childActor`*: ActorId
|
||||
`parentToChild`*: protocol.Handle
|
||||
|
||||
ActionDescriptionFacetStart* {.preservesRecord: "facet-start".} = object
|
||||
`path`*: seq[FacetId]
|
||||
|
||||
ActionDescriptionFacetStop* {.preservesRecord: "facet-stop".} = object
|
||||
`path`*: seq[FacetId]
|
||||
`reason`*: FacetStopReason
|
||||
|
||||
ActionDescriptionLinkedTaskStart* {.preservesRecord: "linked-task-start".} = object
|
||||
`taskName`*: Name
|
||||
`id`*: TaskId
|
||||
|
||||
`ActionDescription`* {.preservesOr.} = object
|
||||
case orKind*: ActionDescriptionKind
|
||||
of ActionDescriptionKind.`dequeue`:
|
||||
`dequeue`*: ActionDescriptionDequeue
|
||||
|
||||
of ActionDescriptionKind.`enqueue`:
|
||||
`enqueue`*: ActionDescriptionEnqueue
|
||||
|
||||
of ActionDescriptionKind.`dequeueInternal`:
|
||||
`dequeueinternal`*: ActionDescriptionDequeueInternal
|
||||
|
||||
of ActionDescriptionKind.`enqueueInternal`:
|
||||
`enqueueinternal`*: ActionDescriptionEnqueueInternal
|
||||
|
||||
of ActionDescriptionKind.`spawn`:
|
||||
`spawn`*: ActionDescriptionSpawn
|
||||
|
||||
of ActionDescriptionKind.`link`:
|
||||
`link`*: ActionDescriptionLink
|
||||
|
||||
of ActionDescriptionKind.`facetStart`:
|
||||
`facetstart`*: ActionDescriptionFacetStart
|
||||
|
||||
of ActionDescriptionKind.`facetStop`:
|
||||
`facetstop`*: ActionDescriptionFacetStop
|
||||
|
||||
of ActionDescriptionKind.`linkedTaskStart`:
|
||||
`linkedtaskstart`*: ActionDescriptionLinkedTaskStart
|
||||
|
||||
|
||||
proc `$`*(x: TargetedTurnEvent | AssertionDescription | Name | ActorActivation |
|
||||
Target |
|
||||
TurnCause |
|
||||
TurnEvent |
|
||||
TurnDescription |
|
||||
ExitStatus |
|
||||
TraceEntry |
|
||||
ActionDescription): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: TargetedTurnEvent | AssertionDescription | Name |
|
||||
ActorActivation |
|
||||
Target |
|
||||
TurnCause |
|
||||
TurnEvent |
|
||||
TurnDescription |
|
||||
ExitStatus |
|
||||
TraceEntry |
|
||||
ActionDescription): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,22 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
WebSocket* {.preservesRecord: "ws".} = object
|
||||
`url`*: string
|
||||
|
||||
Stdio* {.preservesRecord: "stdio".} = object
|
||||
|
||||
Unix* {.preservesRecord: "unix".} = object
|
||||
`path`*: string
|
||||
|
||||
Tcp* {.preservesRecord: "tcp".} = object
|
||||
`host`*: string
|
||||
`port`*: BiggestInt
|
||||
|
||||
proc `$`*(x: WebSocket | Stdio | Unix | Tcp): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: WebSocket | Stdio | Unix | Tcp): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,14 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
Instance* {.preservesRecord: "Instance".} = object
|
||||
`name`*: string
|
||||
`argument`*: Value
|
||||
|
||||
proc `$`*(x: Instance): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Instance): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,660 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[options, tables]
|
||||
import preserves
|
||||
import ../syndicate, ./durings, ./membranes, ./protocols/[gatekeeper, protocol, sturdy, transportAddress]
|
||||
|
||||
when defined(posix):
|
||||
import ./capabilities
|
||||
from std/os import getEnv, `/`
|
||||
|
||||
when defined(traceSyndicate):
|
||||
when defined(posix):
|
||||
template trace(args: varargs[untyped]): untyped = stderr.writeLine(args)
|
||||
else:
|
||||
template trace(args: varargs[untyped]): untyped = echo(args)
|
||||
else:
|
||||
template trace(args: varargs[untyped]): untyped = discard
|
||||
|
||||
export `$`
|
||||
|
||||
export Route, Stdio, Tcp, WebSocket, Unix
|
||||
|
||||
type
|
||||
Assertion = Value
|
||||
Event = protocol.Event
|
||||
Handle = actors.Handle
|
||||
Oid = sturdy.Oid
|
||||
Turn = syndicate.Turn
|
||||
WireRef = sturdy.WireRef
|
||||
|
||||
PacketWriter = proc (turn: Turn; buf: seq[byte]) {.closure.}
|
||||
RelaySetup = proc (turn: Turn; relay: Relay) {.closure.}
|
||||
|
||||
Relay* = ref object
|
||||
facet: Facet
|
||||
inboundAssertions: Table[Handle,
|
||||
tuple[localHandle: Handle, imported: seq[WireSymbol]]]
|
||||
outboundAssertions: Table[Handle, seq[WireSymbol]]
|
||||
pendingTurn: protocol.Turn
|
||||
exported: Membrane
|
||||
imported: Membrane
|
||||
nextLocalOid: Oid
|
||||
wireBuf: BufferedDecoder
|
||||
packetWriter: PacketWriter
|
||||
peer: Cap
|
||||
|
||||
SyncPeerEntity = ref object of Entity
|
||||
relay: Relay
|
||||
peer: Cap
|
||||
handleMap: Table[Handle, Handle]
|
||||
e: WireSymbol
|
||||
|
||||
RelayEntity = ref object of Entity
|
||||
## https://synit.org/book/protocol.html#relay-entities
|
||||
label: string
|
||||
relay: Relay
|
||||
|
||||
proc releaseCapOut(r: Relay; e: WireSymbol) =
|
||||
r.exported.drop e
|
||||
|
||||
method publish(spe: SyncPeerEntity; t: Turn; a: AssertionRef; h: Handle) =
|
||||
spe.handleMap[h] = publish(t, spe.peer, a.value)
|
||||
|
||||
method retract(se: SyncPeerEntity; t: Turn; h: Handle) =
|
||||
var other: Handle
|
||||
if se.handleMap.pop(h, other):
|
||||
retract(t, other)
|
||||
|
||||
method message(se: SyncPeerEntity; t: Turn; a: AssertionRef) =
|
||||
if not se.e.isNil:
|
||||
se.relay.releaseCapOut(se.e)
|
||||
message(t, se.peer, a.value)
|
||||
|
||||
method sync(se: SyncPeerEntity; t: Turn; peer: Cap) =
|
||||
sync(t, se.peer, peer)
|
||||
|
||||
proc newSyncPeerEntity(r: Relay; p: Cap): SyncPeerEntity =
|
||||
SyncPeerEntity(relay: r, peer: p)
|
||||
|
||||
proc rewriteCapOut(relay: Relay; cap: Cap; exported: var seq[WireSymbol]): WireRef =
|
||||
if cap.target of RelayEntity and cap.target.RelayEntity.relay == relay and cap.attenuation.len == 0:
|
||||
result = WireRef(orKind: WireRefKind.yours, yours: WireRefYours(oid: cap.target.oid))
|
||||
else:
|
||||
var ws = grab(relay.exported, cap)
|
||||
if ws.isNil:
|
||||
ws = newWireSymbol(relay.exported, relay.nextLocalOid, cap)
|
||||
inc relay.nextLocalOid
|
||||
exported.add ws
|
||||
result = WireRef(
|
||||
orKind: WireRefKind.mine,
|
||||
mine: WireRefMine(oid: ws.oid))
|
||||
|
||||
proc rewriteOut(relay: Relay; v: Assertion):
|
||||
tuple[rewritten: Value, exported: seq[WireSymbol]] =
|
||||
var exported: seq[WireSymbol]
|
||||
result.rewritten = mapEmbeds(v) do (pr: Value) -> Value:
|
||||
let o = pr.unembed(Cap); if o.isSome:
|
||||
rewriteCapOut(relay, o.get, exported).toPreserves
|
||||
else: pr
|
||||
result.exported = exported
|
||||
|
||||
proc register(relay: Relay; v: Assertion; h: Handle): tuple[rewritten: Value, exported: seq[WireSymbol]] =
|
||||
result = rewriteOut(relay, v)
|
||||
relay.outboundAssertions[h] = result.exported
|
||||
|
||||
proc deregister(relay: Relay; h: Handle) =
|
||||
var outbound: seq[WireSymbol]
|
||||
if relay.outboundAssertions.pop(h, outbound):
|
||||
for e in outbound: releaseCapOut(relay, e)
|
||||
|
||||
proc send(relay: Relay; turn: Turn; rOid: protocol.Oid; m: Event) =
|
||||
# TODO: don't send right away.
|
||||
relay.pendingTurn.add TurnEvent(oid: rOid, event: m)
|
||||
queueEffect(turn, relay.facet) do (turn: Turn):
|
||||
if relay.pendingTurn.len > 0:
|
||||
var pkt = Packet(
|
||||
orKind: PacketKind.Turn,
|
||||
turn: move relay.pendingTurn)
|
||||
trace "C: ", pkt
|
||||
relay.packetWriter(turn, encode pkt)
|
||||
|
||||
proc send(re: RelayEntity; turn: Turn; ev: Event) =
|
||||
send(re.relay, turn, protocol.Oid re.oid, ev)
|
||||
|
||||
method publish(re: RelayEntity; t: Turn; a: AssertionRef; h: Handle) =
|
||||
re.send(t, Event(
|
||||
orKind: EventKind.Assert,
|
||||
`assert`: protocol.Assert(
|
||||
assertion: re.relay.register(a.value, h).rewritten,
|
||||
handle: h)))
|
||||
|
||||
method retract(re: RelayEntity; t: Turn; h: Handle) =
|
||||
re.relay.deregister h
|
||||
re.send(t, Event(
|
||||
orKind: EventKind.Retract,
|
||||
retract: Retract(handle: h)))
|
||||
|
||||
method message(re: RelayEntity; turn: Turn; msg: AssertionRef) =
|
||||
var (value, exported) = rewriteOut(re.relay, msg.value)
|
||||
assert(len(exported) == 0, "cannot send a reference in a message")
|
||||
if len(exported) == 0:
|
||||
re.send(turn, Event(orKind: EventKind.Message, message: Message(body: value)))
|
||||
|
||||
method sync(re: RelayEntity; turn: Turn; peer: Cap) =
|
||||
var
|
||||
peerEntity = newSyncPeerEntity(re.relay, peer)
|
||||
exported: seq[WireSymbol]
|
||||
wr = rewriteCapOut(re.relay, turn.newCap(peerEntity), exported)
|
||||
peerEntity.e = exported[0]
|
||||
var ev = Event(orKind: EventKind.Sync)
|
||||
ev.sync.peer = wr.toPreserves.embed
|
||||
re.send(turn, ev)
|
||||
|
||||
proc newRelayEntity(label: string; r: Relay; o: Oid): RelayEntity =
|
||||
RelayEntity(label: label, relay: r, oid: o)
|
||||
|
||||
using
|
||||
relay: Relay
|
||||
facet: Facet
|
||||
|
||||
proc lookupLocal(relay; oid: Oid): Cap =
|
||||
let sym = relay.exported.grab oid
|
||||
if not sym.isNil:
|
||||
result = sym.cap
|
||||
|
||||
proc rewriteCapIn(relay; facet; n: WireRef, imported: var seq[WireSymbol]): Cap =
|
||||
case n.orKind
|
||||
of WireRefKind.mine:
|
||||
var e = relay.imported.grab(n.mine.oid)
|
||||
if e.isNil:
|
||||
e = newWireSymbol(
|
||||
relay.imported,
|
||||
n.mine.oid,
|
||||
newCap(facet, newRelayEntity("rewriteCapIn", relay, n.mine.oid)),
|
||||
)
|
||||
imported.add e
|
||||
result = e.cap
|
||||
of WireRefKind.yours:
|
||||
result = relay.lookupLocal(n.yours.oid)
|
||||
if result.isNil:
|
||||
result = newInertCap()
|
||||
elif n.yours.attenuation.len > 0:
|
||||
result = attenuate(result, n.yours.attenuation)
|
||||
|
||||
proc rewriteIn(relay; facet; v: Value):
|
||||
tuple[rewritten: Assertion; imported: seq[WireSymbol]] =
|
||||
var imported: seq[WireSymbol]
|
||||
result.rewritten = mapEmbeds(v) do (pr: Value) -> Value:
|
||||
let wr = pr.preservesTo WireRef; if wr.isSome:
|
||||
result = rewriteCapIn(relay, facet, wr.get, imported).embed
|
||||
else:
|
||||
result = pr
|
||||
result.imported = imported
|
||||
|
||||
proc close(r: Relay) = discard
|
||||
|
||||
proc dispatch(relay: Relay; turn: Turn; cap: Cap; event: Event) =
|
||||
case event.orKind
|
||||
of EventKind.Assert:
|
||||
let (a, imported) = rewriteIn(relay, turn.facet, event.assert.assertion)
|
||||
relay.inboundAssertions[event.assert.handle] = (publish(turn, cap, a), imported,)
|
||||
|
||||
of EventKind.Retract:
|
||||
let remoteHandle = event.retract.handle
|
||||
var outbound: tuple[localHandle: Handle, imported: seq[WireSymbol]]
|
||||
if relay.inboundAssertions.pop(remoteHandle, outbound):
|
||||
for e in outbound.imported: relay.imported.drop e
|
||||
turn.retract(outbound.localHandle)
|
||||
|
||||
of EventKind.Message:
|
||||
let (a, imported) = rewriteIn(relay, turn.facet, event.message.body)
|
||||
assert imported.len == 0, "Cannot receive transient reference"
|
||||
turn.message(cap, a)
|
||||
|
||||
of EventKind.Sync:
|
||||
turn.sync(cap) do (turn: Turn):
|
||||
var
|
||||
(v, imported) = rewriteIn(relay, turn.facet, event.sync.peer)
|
||||
peer = unembed(v, Cap)
|
||||
if peer.isSome:
|
||||
turn.message(get peer, true)
|
||||
for e in imported: relay.imported.drop e
|
||||
|
||||
proc dispatch(relay: Relay; v: Value) =
|
||||
trace "S: ", v
|
||||
run(relay.facet) do (t: Turn):
|
||||
var pkt: Packet
|
||||
if pkt.fromPreserves(v):
|
||||
case pkt.orKind
|
||||
of PacketKind.Turn:
|
||||
# https://synit.org/book/protocol.html#turn-packets
|
||||
for te in pkt.turn:
|
||||
let r = lookupLocal(relay, te.oid.Oid)
|
||||
if not r.isNil:
|
||||
dispatch(relay, t, r, te.event)
|
||||
of PacketKind.Error:
|
||||
# https://synit.org/book/protocol.html#error-packets
|
||||
when defined(posix):
|
||||
stderr.writeLine("Error from server: ", pkt.error.message, " (detail: ", pkt.error.detail, ")")
|
||||
close relay
|
||||
of PacketKind.Extension:
|
||||
# https://synit.org/book/protocol.html#extension-packets
|
||||
discard
|
||||
of PacketKind.Nop:
|
||||
discard
|
||||
else:
|
||||
when defined(posix):
|
||||
stderr.writeLine("discarding undecoded packet ", v)
|
||||
|
||||
proc recv(relay: Relay; buf: openarray[byte]; slice: Slice[int]) =
|
||||
feed(relay.wireBuf, buf, slice)
|
||||
var pr = decode(relay.wireBuf)
|
||||
if pr.isSome: dispatch(relay, pr.get)
|
||||
|
||||
proc recv(relay: Relay; buf: openarray[byte]) {.used.} =
|
||||
feed(relay.wireBuf, buf)
|
||||
var pr = decode(relay.wireBuf)
|
||||
if pr.isSome: dispatch(relay, pr.get)
|
||||
|
||||
type
|
||||
RelayOptions* = object of RootObj
|
||||
packetWriter*: PacketWriter
|
||||
|
||||
RelayActorOptions* = object of RelayOptions
|
||||
initialOid*: Option[Oid]
|
||||
initialCap*: Cap
|
||||
nextLocalOid*: Option[Oid]
|
||||
|
||||
proc spawnRelay(name: string; turn: Turn; opts: RelayActorOptions; setup: RelaySetup) =
|
||||
linkActor(turn, name) do (turn: Turn):
|
||||
turn.preventInertCheck()
|
||||
let relay = Relay(
|
||||
facet: turn.facet,
|
||||
packetWriter: opts.packetWriter,
|
||||
wireBuf: newBufferedDecoder(0),
|
||||
)
|
||||
if not opts.initialCap.isNil:
|
||||
var exported: seq[WireSymbol]
|
||||
discard rewriteCapOut(relay, opts.initialCap, exported)
|
||||
opts.nextLocalOid.map do (oid: Oid):
|
||||
relay.nextLocalOid =
|
||||
if oid == 0.Oid: 1.Oid
|
||||
else: oid
|
||||
assert opts.initialOid.isSome
|
||||
if opts.initialOid.isSome:
|
||||
var
|
||||
imported: seq[WireSymbol]
|
||||
wr = WireRef(
|
||||
orKind: WireRefKind.mine,
|
||||
mine: WireRefMine(oid: opts.initialOid.get))
|
||||
relay.peer = rewriteCapIn(relay, turn.facet, wr, imported)
|
||||
assert not relay.peer.isNil
|
||||
setup(turn, relay)
|
||||
|
||||
proc rejected(detail: Value): Resolved =
|
||||
result = Resolved(orKind: ResolvedKind.Rejected)
|
||||
result.rejected.detail = detail
|
||||
|
||||
proc accepted(cap: Cap): Resolved =
|
||||
result = Resolved(orKind: ResolvedKind.accepted)
|
||||
result.accepted.responderSession = cap
|
||||
|
||||
type ShutdownEntity = ref object of Entity
|
||||
method retract(e: ShutdownEntity; turn: Turn; h: Handle) =
|
||||
stopActor(e.facet)
|
||||
|
||||
when defined(posix):
|
||||
|
||||
import std/[oserrors, posix]
|
||||
import pkg/sys/[files, handles, ioqueue, sockets]
|
||||
export transportAddress.Unix
|
||||
|
||||
type StdioEntity = ref object of Entity
|
||||
relay: Relay
|
||||
stdin: AsyncFile
|
||||
alive: bool
|
||||
|
||||
method message(entity: StdioEntity; turn: Turn; ass: AssertionRef) =
|
||||
if ass.value.preservesTo(ForceDisconnect).isSome:
|
||||
entity.alive = false
|
||||
|
||||
proc loop(entity: StdioEntity) {.asyncio.} =
|
||||
let buf = new seq[byte]
|
||||
entity.alive = true
|
||||
while entity.alive:
|
||||
buf[].setLen(0x1000)
|
||||
let n = read(entity.stdin, buf)
|
||||
if n > 0:
|
||||
entity.relay.recv(buf[], 0..<n)
|
||||
else:
|
||||
entity.alive = false
|
||||
if n < 0: raiseOSError(osLastError())
|
||||
stopActor(entity.facet)
|
||||
|
||||
proc connectTransport(turn: Turn; ds: Cap; ta: transportAddress.Stdio) =
|
||||
## Connect to an external dataspace over stdio.
|
||||
let localDataspace = newDataspace(turn)
|
||||
proc stdoutWriter(turn: Turn; buf: seq[byte]) =
|
||||
## Blocking write to stdout.
|
||||
let n = writeBytes(stdout, buf, 0, buf.len)
|
||||
flushFile(stdout)
|
||||
if n != buf.len:
|
||||
stopActor(turn)
|
||||
var opts = RelayActorOptions(
|
||||
packetWriter: stdoutWriter,
|
||||
initialCap: localDataspace,
|
||||
initialOid: 0.Oid.some,
|
||||
)
|
||||
spawnRelay("stdio", turn, opts) do (turn: Turn; relay: Relay):
|
||||
let
|
||||
facet = turn.facet
|
||||
fd = stdin.getOsFileHandle()
|
||||
flags = fcntl(fd.cint, F_GETFL, 0)
|
||||
if flags < 0: raiseOSError(osLastError())
|
||||
if fcntl(fd.cint, F_SETFL, flags or O_NONBLOCK) < 0:
|
||||
raiseOSError(osLastError())
|
||||
let entity = StdioEntity(
|
||||
facet: turn.facet, relay: relay, stdin: newAsyncFile(FD fd))
|
||||
onStop(entity.facet) do (turn: Turn):
|
||||
entity.alive = false
|
||||
close(entity.stdin)
|
||||
# Close stdin to remove it from the ioqueue
|
||||
discard trampoline:
|
||||
whelp loop(entity)
|
||||
publish(turn, ds, TransportConnection(
|
||||
`addr`: ta.toPreserves,
|
||||
control: newCap(entity, turn),
|
||||
resolved: localDataspace.accepted,
|
||||
))
|
||||
|
||||
proc connectStdio*(turn: Turn; ds: Cap) =
|
||||
## Connect to an external dataspace over stdin and stdout.
|
||||
connectTransport(turn, ds, transportAddress.Stdio())
|
||||
|
||||
type
|
||||
TcpEntity = ref object of Entity
|
||||
relay: Relay
|
||||
sock: AsyncConn[sockets.Protocol.TCP]
|
||||
alive: bool
|
||||
|
||||
UnixEntity = ref object of Entity
|
||||
relay: Relay
|
||||
sock: AsyncConn[sockets.Protocol.Unix]
|
||||
alive: bool
|
||||
|
||||
SocketEntity = TcpEntity | UnixEntity
|
||||
|
||||
method message(entity: SocketEntity; turn: Turn; ass: AssertionRef) =
|
||||
if ass.value.preservesTo(ForceDisconnect).isSome:
|
||||
entity.alive = false
|
||||
|
||||
template bootSocketEntity() {.dirty.} =
|
||||
proc setup(turn: Turn) {.closure.} =
|
||||
proc kill(turn: Turn) =
|
||||
if entity.alive:
|
||||
entity.alive = false
|
||||
close(entity.sock)
|
||||
onStop(turn, kill)
|
||||
var ass = TransportConnection(
|
||||
`addr`: ta.toPreserves,
|
||||
control: newCap(entity, turn),
|
||||
resolved: entity.relay.peer.accepted,
|
||||
)
|
||||
publish(turn, ds, ass)
|
||||
run(entity.relay.facet, setup)
|
||||
let buf = new seq[byte]
|
||||
entity.alive = true
|
||||
while entity.alive:
|
||||
buf[].setLen(0x1000)
|
||||
let n = read(entity.sock, buf)
|
||||
if n > 0:
|
||||
entity.relay.recv(buf[], 0..<n)
|
||||
else:
|
||||
entity.alive = false
|
||||
if n < 0: raiseOSError(osLastError())
|
||||
stopActor(entity.facet)
|
||||
# the socket closes when the actor is stopped
|
||||
|
||||
proc boot(entity: TcpEntity; ta: transportAddress.Tcp; ds: Cap) {.asyncio.} =
|
||||
entity.sock = connectTcpAsync(ta.host, Port ta.port)
|
||||
bootSocketEntity()
|
||||
|
||||
proc boot(entity: UnixEntity; ta: transportAddress.Unix; ds: Cap) {.asyncio.} =
|
||||
entity.sock = connectUnixAsync(ta.path)
|
||||
bootSocketEntity()
|
||||
|
||||
template spawnSocketRelay() {.dirty.} =
|
||||
proc writeConn(turn: Turn; buf: seq[byte]) =
|
||||
if entity.alive:
|
||||
discard trampoline:
|
||||
whelp write(entity.sock, buf)
|
||||
var ops = RelayActorOptions(
|
||||
packetWriter: writeConn,
|
||||
initialOid: 0.Oid.some,
|
||||
)
|
||||
spawnRelay("socket", turn, ops) do (turn: Turn; relay: Relay):
|
||||
entity.facet = turn.facet
|
||||
entity.relay = relay
|
||||
discard trampoline:
|
||||
whelp boot(entity, ta, ds)
|
||||
|
||||
proc connectTransport(turn: Turn; ds: Cap; ta: transportAddress.Tcp) =
|
||||
let entity = TcpEntity()
|
||||
spawnSocketRelay()
|
||||
|
||||
proc connectTransport(turn: Turn; ds: Cap; ta: transportAddress.Unix) =
|
||||
let entity = UnixEntity()
|
||||
spawnSocketRelay()
|
||||
|
||||
elif defined(solo5):
|
||||
|
||||
import solo5_dispatcher
|
||||
import taps
|
||||
|
||||
type
|
||||
TcpEntity = ref object of Entity
|
||||
relay: Relay
|
||||
conn: Connection
|
||||
decoder: BufferedDecoder
|
||||
|
||||
method message(entity: TcpEntity; turn: Turn; ass: AssertionRef) =
|
||||
if ass.value.preservesTo(ForceDisconnect).isSome:
|
||||
entity.conn.abort()
|
||||
|
||||
proc connectTransport(turn: Turn; ds: Cap; ta: transportAddress.Tcp) =
|
||||
let entity = TcpEntity(facet: turn.facet)
|
||||
|
||||
proc writeConn(turn: Turn; buf: seq[byte]) =
|
||||
assert not entity.conn.isNil
|
||||
entity.conn.batch:
|
||||
entity.conn.send(buf)
|
||||
var ops = RelayActorOptions(
|
||||
packetWriter: writeConn,
|
||||
initialOid: 0.Oid.some,
|
||||
)
|
||||
spawnRelay("socket", turn, ops) do (turn: Turn; relay: Relay):
|
||||
entity.facet = turn.facet
|
||||
entity.relay = relay
|
||||
|
||||
var ep = newRemoteEndpoint()
|
||||
if ta.host.isIpAddress:
|
||||
ep.with ta.host.parseIpAddress
|
||||
else:
|
||||
ep.withHostname ta.host
|
||||
ep.with ta.port.Port
|
||||
|
||||
var tp = newTransportProperties()
|
||||
tp.require "reliability"
|
||||
tp.ignore "congestion-control"
|
||||
tp.ignore "preserve-order"
|
||||
|
||||
var preconn = newPreconnection(
|
||||
remote=[ep], transport=tp.some)
|
||||
entity.conn = preconn.initiate()
|
||||
entity.facet.onStop do (turn: Turn):
|
||||
entity.conn.close()
|
||||
entity.conn.onConnectionError do (err: ref Exception):
|
||||
run(entity.facet) do (turn: Turn):
|
||||
terminate(turn, err)
|
||||
entity.conn.onClosed():
|
||||
stop(entity.facet)
|
||||
entity.conn.onReceivedPartial do (data: seq[byte]; ctx: MessageContext; eom: bool):
|
||||
entity.relay.recv(data)
|
||||
if eom:
|
||||
stop(entity.facet)
|
||||
else:
|
||||
entity.conn.receive()
|
||||
entity.conn.onReady do ():
|
||||
entity.facet.run do (turn: Turn):
|
||||
publish(turn, ds, TransportConnection(
|
||||
`addr`: ta.toPreserves,
|
||||
control: newCap(entity, turn),
|
||||
resolved: entity.relay.peer.accepted,
|
||||
))
|
||||
entity.conn.receive()
|
||||
|
||||
proc walk(turn: Turn; ds, origin: Cap; route: Route; transOff, stepOff: int) =
|
||||
if stepOff < route.pathSteps.len:
|
||||
let
|
||||
step = route.pathSteps[stepOff]
|
||||
rejectPat = ResolvedPathStep?:{
|
||||
0: ?(origin.embed), 1: ?step, 2: ?:Rejected}
|
||||
acceptPat = ResolvedPathStep?:{
|
||||
0: ?(origin.embed), 1: ?step, 2: ?:ResolvedAccepted}
|
||||
onPublish(turn, ds, rejectPat) do (detail: Value):
|
||||
publish(turn, ds, ResolvePath(
|
||||
route: route,
|
||||
`addr`: route.transports[transOff],
|
||||
resolved: detail.rejected,
|
||||
))
|
||||
during(turn, ds, acceptPat) do (next: Cap):
|
||||
walk(turn, ds, next, route, transOff, stepOff.succ)
|
||||
else:
|
||||
publish(turn, ds, ResolvePath(
|
||||
route: route,
|
||||
`addr`: route.transports[transOff],
|
||||
resolved: origin.accepted,
|
||||
))
|
||||
|
||||
proc connectRoute(turn: Turn; ds: Cap; route: Route; transOff: int) =
|
||||
let rejectPat = TransportConnection ?: {
|
||||
0: ?route.transports[transOff],
|
||||
2: ?:Rejected,
|
||||
}
|
||||
during(turn, ds, rejectPat) do (detail: Value):
|
||||
publish(turn, ds, ResolvePath(
|
||||
route: route,
|
||||
`addr`: route.transports[transOff],
|
||||
resolved: detail.rejected,
|
||||
))
|
||||
let acceptPat = TransportConnection?:{
|
||||
0: ?route.transports[transOff],
|
||||
2: ?:ResolvedAccepted,
|
||||
}
|
||||
onPublish(turn, ds, acceptPat) do (origin: Cap):
|
||||
origin.relay.run do (turn: Turn):
|
||||
# walk using the facet that manages the transport connection
|
||||
walk(turn, ds, origin, route, transOff, 0)
|
||||
|
||||
type StepCallback = proc (turn: Turn; step: Value; origin: Cap; res: Resolved) {.closure.}
|
||||
|
||||
proc spawnStepResolver(turn: Turn; ds: Cap; stepType: Value; cb: StepCallback) =
|
||||
let pat = observePattern(
|
||||
ResolvedPathStep?:{1: grabRecord(stepType)},
|
||||
{ @[0.toPreserve]: grabLit(), @[1.toPreserve]: grab() },
|
||||
)
|
||||
during(turn, ds, pat) do (origin: Cap; stepDetail: Literal[Value]):
|
||||
proc duringCallback(turn: Turn; ass: Value; h: Handle): TurnAction =
|
||||
var res: Resolved
|
||||
if res.fromPreserves ass:
|
||||
cb(turn, stepDetail.value, origin, res)
|
||||
proc action(turn: Turn) =
|
||||
stop(turn)
|
||||
result = action
|
||||
publish(turn, origin, Resolve(
|
||||
step: stepDetail.value, observer: newCap(turn, during(duringCallback))))
|
||||
|
||||
proc spawnRelays*(turn: Turn; ds: Cap) =
|
||||
## Spawn actors that manage routes and appease gatekeepers.
|
||||
|
||||
let transPat = observePattern(!TransportConnection, { @[0.toPreserves]: grab() })
|
||||
# Use a generic pattern and type matching
|
||||
# in the during handler because it is easy.
|
||||
|
||||
when defined(posix):
|
||||
let stdioPat = ?Observe(pattern: TransportConnection?:{0: ?:Stdio})
|
||||
during(turn, ds, stdioPat) do:
|
||||
connectTransport(turn, ds, Stdio())
|
||||
|
||||
# TODO: unix pattern
|
||||
during(turn, ds, transPat) do (ta: Literal[transportAddress.Unix]):
|
||||
try: connectTransport(turn, ds, ta.value)
|
||||
except exceptions.IOError as e:
|
||||
publish(turn, ds, TransportConnection(
|
||||
`addr`: ta.toPreserve,
|
||||
resolved: rejected(embed e),
|
||||
))
|
||||
|
||||
# TODO: tcp pattern
|
||||
during(turn, ds, transPat) do (ta: Literal[transportAddress.Tcp]):
|
||||
try: connectTransport(turn, ds, ta.value)
|
||||
except exceptions.IOError as e:
|
||||
publish(turn, ds, TransportConnection(
|
||||
`addr`: ta.toPreserve,
|
||||
resolved: rejected(embed e),
|
||||
))
|
||||
|
||||
let resolvePat = observePattern(!ResolvePath, {@[0.toPreserves]: grab()})
|
||||
during(turn, ds, resolvePat) do (route: Literal[Route]):
|
||||
for i, transAddr in route.value.transports:
|
||||
connectRoute(turn, ds, route.value, i)
|
||||
|
||||
spawnStepResolver(turn, ds, "ref".toSymbol) do (
|
||||
turn: Turn, step: Value, origin: Cap, res: Resolved):
|
||||
publish(turn, ds, ResolvedPathStep(
|
||||
origin: origin, pathStep: step, resolved: res))
|
||||
|
||||
type BootProc* = proc (turn: Turn; ds: Cap) {.closure.}
|
||||
|
||||
proc resolve*(turn: Turn; ds: Cap; route: Route; bootProc: BootProc) =
|
||||
## Resolve `route` within `ds` and call `bootProc` with resolved capabilities.
|
||||
let pat = ResolvePath ?: {0: ?route, 3: ?:ResolvedAccepted}
|
||||
during(turn, ds, ResolvePath ?: {0: ?route, 3: ?:ResolvedAccepted}) do (dst: Cap):
|
||||
bootProc(turn, dst)
|
||||
|
||||
when defined(posix):
|
||||
const defaultRoute* = "<route [<stdio>]>"
|
||||
|
||||
proc envRoute*: Route =
|
||||
## Get an route to a Syndicate capability from the calling environment.
|
||||
## On UNIX this is the SYNDICATE_ROUTE environmental variable with a
|
||||
## fallack to a defaultRoute_.
|
||||
## See https://git.syndicate-lang.org/syndicate-lang/syndicate-protocols/raw/branch/main/schemas/gatekeeper.prs.
|
||||
var text = getEnv("SYNDICATE_ROUTE", defaultRoute)
|
||||
if text == "":
|
||||
var tx = (getEnv("XDG_RUNTIME_DIR", "/run/user/1000") / "dataspace").toPreserves
|
||||
result.transports = @[initRecord("unix", tx)]
|
||||
result.pathSteps = @[capabilities.mint().toPreserves]
|
||||
else:
|
||||
var pr = parsePreserves(text)
|
||||
if not result.fromPreserves(pr):
|
||||
raise newException(ValueError, "failed to parse $SYNDICATE_ROUTE " & $pr)
|
||||
|
||||
proc resolveEnvironment*(turn: Turn; bootProc: BootProc) =
|
||||
## Resolve a capability from the calling environment
|
||||
## and call `bootProc`. See envRoute_.
|
||||
var resolved = false
|
||||
let
|
||||
ds = newDataspace(turn)
|
||||
pat = ResolvePath ?: {0: ?envRoute(), 3: ?:ResolvedAccepted}
|
||||
during(turn, ds, pat) do (dst: Cap):
|
||||
if not resolved:
|
||||
resolved = true
|
||||
bootProc(turn, dst)
|
||||
do:
|
||||
resolved = false
|
||||
spawnRelays(turn, ds)
|
||||
|
||||
# TODO: define a runActor that comes preloaded with relaying
|
|
@ -1,171 +1,197 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2021 Emery Hemingway
|
||||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import ./assertions, ./bags, ./events
|
||||
import preserves, preserves/records
|
||||
import lists, options, sets, tables
|
||||
## https://git.syndicate-lang.org/syndicate-lang/syndicate-rkt/src/commit/90c4c60699069b496491b81ee63b5a45ffd638cb/syndicate/HOWITWORKS.md
|
||||
|
||||
import std/[assertions, hashes, options, sets, tables]
|
||||
import preserves
|
||||
import ./actors, ./bags, ./patterns
|
||||
import ./protocols/dataspacePatterns
|
||||
|
||||
type
|
||||
NonEmptySkeleton*[Shape] = object
|
||||
shape: Shape
|
||||
members: seq[Skeleton[Shape]]
|
||||
Skeleton*[Shape] = Option[NonEmptySkeleton[Shape]]
|
||||
Pattern = dataspacePatterns.Pattern
|
||||
Path = seq[Value]
|
||||
ClassKind = enum classNone, classRecord, classSequence, classDictionary
|
||||
Class = object
|
||||
kind: ClassKind
|
||||
label: Value
|
||||
|
||||
Shape = string
|
||||
func classOf(v: Value): Class =
|
||||
case v.kind
|
||||
of pkRecord: Class(kind: classRecord, label: v.label)
|
||||
of pkSequence: Class(kind: classSequence)
|
||||
of pkDictionary: Class(kind: classDictionary)
|
||||
else: Class(kind: classNone)
|
||||
|
||||
Value = Preserve
|
||||
|
||||
HandlerCallback* = proc (event: EventKind; bindings: seq[Value]) {.gcsafe.}
|
||||
|
||||
Path = seq[Natural]
|
||||
|
||||
Analysis* = object
|
||||
skeleton: Skeleton[Shape]
|
||||
constPaths: seq[Path]
|
||||
constVals: seq[Value]
|
||||
capturePaths: seq[Path]
|
||||
|
||||
proc projectPath(v: Value; path: Path): Value =
|
||||
result = v
|
||||
for index in path:
|
||||
result = result[index]
|
||||
|
||||
proc projectPaths(v: Value; paths: seq[Path]): seq[Value] =
|
||||
result.setLen(paths.len)
|
||||
for i, path in paths: result[i] = projectPath(v, path)
|
||||
|
||||
proc analyzeAssertion*(a: Value): Analysis =
|
||||
var path: Path
|
||||
proc walk(analysis: var Analysis; a: Value): Skeleton[Shape] =
|
||||
if a.preserveTo(Discard).isSome:
|
||||
discard
|
||||
elif a.preserveTo(Capture).isSome:
|
||||
analysis.capturePaths.add(path)
|
||||
result = walk(analysis, a.fields[0])
|
||||
else:
|
||||
if a.kind == pkRecord:
|
||||
let class = classOf(a)
|
||||
result = some NonEmptySkeleton[Shape](shape: $class)
|
||||
path.add(0)
|
||||
var i: int
|
||||
for field in a.fields:
|
||||
path[path.high] = i
|
||||
result.get.members.add(walk(analysis, field))
|
||||
inc(i)
|
||||
discard path.pop
|
||||
else:
|
||||
analysis.constPaths.add(path)
|
||||
analysis.constVals.add(a)
|
||||
result.skeleton = walk(result, a)
|
||||
proc classOf(p: Pattern): Class =
|
||||
if p.orKind == PatternKind.group:
|
||||
case p.group.type.orKind
|
||||
of GroupTypeKind.rec:
|
||||
Class(kind: classRecord, label: p.group.`type`.rec.label)
|
||||
of GroupTypeKind.arr:
|
||||
Class(kind: classSequence)
|
||||
of GroupTypeKind.dict:
|
||||
Class(kind: classDictionary)
|
||||
else:
|
||||
Class(kind: classNone)
|
||||
|
||||
type
|
||||
Handler = ref object
|
||||
cachedCaptures: Bag[seq[Value]]
|
||||
callbacks: HashSet[HandlerCallback]
|
||||
EventKind = enum addedEvent, removedEvent, messageEvent
|
||||
|
||||
AssertionCache = HashSet[Value]
|
||||
|
||||
ObserverGroup = ref object # Endpoints
|
||||
cachedCaptures: Bag[Captures]
|
||||
observers: Table[Cap, TableRef[Captures, Handle]]
|
||||
|
||||
Leaf = ref object
|
||||
cachedAssertions: AssertionCache
|
||||
handlerMap: Table[seq[Path], Handler]
|
||||
cache: AssertionCache
|
||||
observerGroups: Table[Paths, ObserverGroup]
|
||||
|
||||
LeafMap = TableRef[seq[Value], Leaf]
|
||||
|
||||
Continuation = ref object
|
||||
cachedAssertions: AssertionCache
|
||||
leafMap: Table[seq[Path], TableRef[seq[Value], Leaf]] # TODO: not TableRef?
|
||||
cache: AssertionCache
|
||||
leafMap: Table[Paths, LeafMap]
|
||||
|
||||
Selector = tuple[popCount: int; index: int]
|
||||
func isEmpty(leaf: Leaf): bool =
|
||||
leaf.cache.len == 0 and leaf.observerGroups.len == 0
|
||||
|
||||
Node = ref object
|
||||
edges: Table[Selector, TableRef[string, Node]]
|
||||
continuation: Continuation
|
||||
|
||||
using
|
||||
continuation: Continuation
|
||||
leaf: Leaf
|
||||
node: Node
|
||||
|
||||
proc isEmpty(leaf): bool =
|
||||
leaf.cachedAssertions.len == 0 and leaf.handlerMap.len == 0
|
||||
func isEmpty(cont: Continuation): bool =
|
||||
cont.cache.len == 0 and cont.leafMap.len == 0
|
||||
|
||||
type
|
||||
ContinuationProc = proc (c: Continuation; v: Value) {.gcsafe.}
|
||||
LeafProc = proc (l: Leaf; v: Value) {.gcsafe.}
|
||||
HandlerProc = proc (h: Handler; vs: seq[Value]) {.gcsafe.}
|
||||
ContinuationProc = proc (c: Continuation; v: Value) {.closure.}
|
||||
LeafProc = proc (l: Leaf; v: Value) {.closure.}
|
||||
ObserverProc = proc (turn: Turn; group: ObserverGroup; vs: seq[Value]) {.closure.}
|
||||
|
||||
proc modify(node; operation: EventKind; outerValue: Value;
|
||||
mCont: ContinuationProc; mLeaf: LeafProc; mHandler: HandlerProc) =
|
||||
proc getLeaves(cont: Continuation; presentPaths, constPaths: Paths): LeafMap =
|
||||
result = cont.leafMap.getOrDefault(constPaths)
|
||||
if result.isNil:
|
||||
new result
|
||||
cont.leafMap[constPaths] = result
|
||||
assert not cont.isEmpty
|
||||
for ass in cont.cache:
|
||||
# TODO: check presence
|
||||
let key = projectPaths(ass, constPaths)
|
||||
if key.isSome:
|
||||
var leaf = result.getOrDefault(get key)
|
||||
if leaf.isNil:
|
||||
new leaf
|
||||
result[get key] = leaf
|
||||
leaf.cache.incl(ass)
|
||||
|
||||
proc walkContinuation(continuation) {.gcsafe.}
|
||||
proc getLeaf(leafMap: LeafMap; constVals: seq[Value]): Leaf =
|
||||
result = leafMap.getOrDefault(constVals)
|
||||
if result.isNil:
|
||||
new result
|
||||
leafMap[constVals] = result
|
||||
|
||||
proc walkNode(node; termStack: SinglyLinkedList[seq[Value]]) =
|
||||
# TODO: use a seq for the stack?
|
||||
walkContinuation(node.continuation)
|
||||
for (selector, table) in node.edges.pairs:
|
||||
var nextStack = termStack
|
||||
for _ in 1..selector.popCount:
|
||||
nextStack.head = nextStack.head.next
|
||||
let nextValue = nextStack.head.value[selector.index]
|
||||
if nextValue.isRecord:
|
||||
let nextClass = classOf(nextValue)
|
||||
let nextNode = table.getOrDefault($nextClass)
|
||||
if not nextNode.isNil:
|
||||
nextStack.prepend(nextValue.record)
|
||||
walkNode(nextNode, nextStack)
|
||||
type
|
||||
Selector = tuple[popCount: int; index: Value]
|
||||
|
||||
proc walkContinuation(continuation: Continuation) =
|
||||
mCont(continuation, outerValue)
|
||||
for (constPaths, constValMap) in continuation.leafMap.pairs:
|
||||
Node = ref object
|
||||
continuation: Continuation
|
||||
edges: Table[Selector, TableRef[Class, Node]]
|
||||
|
||||
func isEmpty(node: Node): bool =
|
||||
node.continuation.isEmpty and node.edges.len == 0
|
||||
|
||||
type TermStack = seq[Value]
|
||||
|
||||
proc push(stack: TermStack; val: Value): Termstack =
|
||||
result = stack
|
||||
add(result, val)
|
||||
|
||||
proc pop(stack: TermStack; n: int): TermStack =
|
||||
assert n <= stack.len
|
||||
stack[stack.low..(stack.high-n)]
|
||||
|
||||
proc top(stack: TermStack): Value =
|
||||
assert stack.len > 0
|
||||
stack[stack.high]
|
||||
|
||||
proc modify(node: Node; turn: Turn; outerValue: Value; event: EventKind;
|
||||
modCont: ContinuationProc; modLeaf: LeafProc; modObs: ObserverProc) =
|
||||
|
||||
proc walk(cont: Continuation; turn: Turn) =
|
||||
modCont(cont, outerValue)
|
||||
for constPaths, constValMap in cont.leafMap.pairs:
|
||||
let constVals = projectPaths(outerValue, constPaths)
|
||||
let leaf = constValMap.getOrDefault(constVals)
|
||||
if leaf.isNil:
|
||||
if operation == addedEvent:
|
||||
constValMap[constVals] = Leaf()
|
||||
else:
|
||||
mLeaf(leaf, outerValue)
|
||||
for (capturePaths, handler) in leaf.handlerMap.pairs:
|
||||
mHandler(handler, projectPaths(outerValue, capturePaths))
|
||||
if operation == removedEvent and leaf.isEmpty:
|
||||
constValMap.del(constVals)
|
||||
if constValMap.len == 0:
|
||||
continuation.leafMap.del(constPaths)
|
||||
var stack: SinglyLinkedList[seq[Value]]
|
||||
stack.prepend(@[outerValue])
|
||||
walkNode(node, stack)
|
||||
if constVals.isSome:
|
||||
case event
|
||||
of addedEvent, messageEvent:
|
||||
let leaf = constValMap.getLeaf(get constVals)
|
||||
modLeaf(leaf, outerValue)
|
||||
for capturePaths, observerGroup in leaf.observerGroups.pairs:
|
||||
let captures = projectPaths(outerValue, capturePaths)
|
||||
if captures.isSome:
|
||||
modObs(turn, observerGroup, get captures)
|
||||
of removedEvent:
|
||||
let leaf = constValMap.getOrDefault(get constVals)
|
||||
if not leaf.isNil:
|
||||
modLeaf(leaf, outerValue)
|
||||
for capturePaths, observerGroup in leaf.observerGroups.pairs:
|
||||
let captures = projectPaths(outerValue, capturePaths)
|
||||
if captures.isSome:
|
||||
modObs(turn, observerGroup, get captures)
|
||||
if leaf.isEmpty:
|
||||
constValMap.del(get constVals)
|
||||
|
||||
proc extend[Shape](node; skeleton: Skeleton[Shape]): Continuation =
|
||||
|
||||
proc walk(node: Node; turn: Turn; termStack: TermStack) =
|
||||
walk(node.continuation, turn)
|
||||
for selector, table in node.edges:
|
||||
let
|
||||
nextStack = pop(termStack, selector.popCount)
|
||||
nextValue = step(nextStack.top, selector.index)
|
||||
if nextValue.isSome:
|
||||
let nextClass = classOf(get nextValue)
|
||||
if nextClass.kind != classNone:
|
||||
let nextNode = table.getOrDefault(nextClass)
|
||||
if not nextNode.isNil:
|
||||
walk(nextNode, turn, push(nextStack, get nextValue))
|
||||
if event == removedEvent and nextNode.isEmpty:
|
||||
table.del(nextClass)
|
||||
|
||||
walk(node, turn, @[@[outerValue].toPreserves])
|
||||
|
||||
proc getOrNew[A, B, C](t: var Table[A, TableRef[B, C]], k: A): TableRef[B, C] =
|
||||
result = t.getOrDefault(k)
|
||||
if result.isNil:
|
||||
result = newTable[B, C]()
|
||||
t[k] = result
|
||||
|
||||
proc extendWalk(node: Node; popCount: Natural; stepIndex: Value; pat: Pattern; path: var Path): tuple[popCount: Natural, nextNode: Node] =
|
||||
case pat.orKind
|
||||
of PatternKind.`discard`, PatternKind.lit:
|
||||
result = (popCount, node)
|
||||
of PatternKind.`bind`:
|
||||
result = extendWalk(node, popCount, stepIndex, pat.`bind`.pattern, path)
|
||||
of PatternKind.`group`:
|
||||
let
|
||||
selector: Selector = (popCount, stepIndex,)
|
||||
table = node.edges.getOrNew(selector)
|
||||
class = classOf pat
|
||||
result.nextNode = table.getOrDefault(class)
|
||||
if result.nextNode.isNil:
|
||||
new result.nextNode
|
||||
table[class] = result.nextNode
|
||||
new result.nextNode.continuation
|
||||
for a in node.continuation.cache:
|
||||
var v = step(a, path)
|
||||
if v.isSome and class == classOf(get v):
|
||||
result.nextNode.continuation.cache.incl a
|
||||
result.popCount = 0
|
||||
for step, p in pat.group.entries:
|
||||
add(path, step)
|
||||
result = extendWalk(result.nextNode, result.popCount, step, p, path)
|
||||
discard pop(path)
|
||||
inc(result.popCount)
|
||||
|
||||
proc extend(node: var Node; pat: Pattern): Continuation =
|
||||
var path: Path
|
||||
|
||||
proc walkNode(node; popCount, index: int; skeleton: Skeleton[Shape]): tuple[popCount: int, node: Node] =
|
||||
assert(not node.isNil)
|
||||
if skeleton.isNone:
|
||||
return (popCount, node)
|
||||
else:
|
||||
let selector: Selector = (popCount, index)
|
||||
var
|
||||
cls = skeleton.get.shape
|
||||
var
|
||||
table = node.edges.getOrDefault(selector)
|
||||
if table.isNil:
|
||||
table = newTable[string, Node]()
|
||||
node.edges[selector] = table
|
||||
var nextNode = table.getOrDefault(cls)
|
||||
if nextNode.isNil:
|
||||
nextNode = Node(continuation: Continuation())
|
||||
table[cls] = nextNode
|
||||
for a in node.continuation.cachedAssertions:
|
||||
if $classOf(projectPath(a, path)) == cls:
|
||||
nextNode.continuation.cachedAssertions.incl(a)
|
||||
block:
|
||||
var popCount, index: int
|
||||
path.add(index)
|
||||
for member in skeleton.get.members:
|
||||
(popCount, nextNode) = walkNode(nextNode, result.popCount, index, member)
|
||||
inc(index)
|
||||
discard path.pop()
|
||||
path.add(index)
|
||||
discard path.pop()
|
||||
result = (popCount.succ, nextNode)
|
||||
walkNode(node, 0, 0, skeleton).node.continuation
|
||||
extendWalk(node, 0, 0.toPreserves, pat, path).nextNode.continuation
|
||||
|
||||
type
|
||||
Index* = object
|
||||
|
@ -175,93 +201,89 @@ type
|
|||
proc initIndex*(): Index =
|
||||
Index(root: Node(continuation: Continuation()))
|
||||
|
||||
using index: Index
|
||||
proc getEndpoints(leaf: Leaf; capturePaths: Paths): ObserverGroup =
|
||||
result = leaf.observerGroups.getOrDefault(capturePaths)
|
||||
if result.isNil:
|
||||
new result
|
||||
leaf.observerGroups[capturePaths] = result
|
||||
for term in leaf.cache:
|
||||
# leaf.cache would be empty if observers come before assertions
|
||||
let captures = projectPaths(term, capturePaths)
|
||||
if captures.isSome:
|
||||
discard result.cachedCaptures.change(get captures, +1)
|
||||
|
||||
proc addHandler*(index; res: Analysis; callback: HandlerCallback) =
|
||||
assert(not index.root.isNil)
|
||||
proc add*(index: var Index; turn: Turn; pattern: Pattern; observer: Cap) =
|
||||
let
|
||||
constPaths = res.constPaths
|
||||
constVals = res.constVals
|
||||
capturePaths = res.capturePaths
|
||||
continuation = index.root.extend(res.skeleton)
|
||||
var constValMap = continuation.leafMap.getOrDefault(constPaths)
|
||||
if constValMap.isNil:
|
||||
constValMap = newTable[seq[Value], Leaf]()
|
||||
continuation.leafMap[constPaths] = constValMap
|
||||
for a in continuation.cachedAssertions:
|
||||
let key = projectPaths(a, constPaths)
|
||||
var leaf = constValMap.getOrDefault(key)
|
||||
if leaf.isNil:
|
||||
new leaf
|
||||
constValMap[key] = leaf
|
||||
leaf.cachedAssertions.incl(a)
|
||||
var leaf = constValMap.getOrDefault(constVals)
|
||||
if leaf.isNil:
|
||||
new leaf
|
||||
constValMap[constVals] = leaf
|
||||
var handler = leaf.handlerMap.getOrDefault(capturePaths)
|
||||
if handler.isNil:
|
||||
new handler
|
||||
leaf.handlerMap[capturePaths] = handler
|
||||
for a in leaf.cachedAssertions:
|
||||
let a = projectPaths(a, capturePaths)
|
||||
if handler.cachedCaptures.contains(a):
|
||||
discard handler.cachedCaptures.change(a, +1)
|
||||
handler.callbacks.incl(callback)
|
||||
for captures, count in handler.cachedCaptures.pairs:
|
||||
callback(addedEvent, captures)
|
||||
cont = index.root.extend(pattern)
|
||||
analysis = analyse pattern
|
||||
constValMap = cont.getLeaves(analysis.presentPaths, analysis.constPaths)
|
||||
leaf = constValMap.getLeaf(analysis.constValues)
|
||||
endpoints = leaf.getEndpoints(analysis.capturePaths)
|
||||
# TODO if endpoints.cachedCaptures.len > 0:
|
||||
var captureMap = newTable[seq[Value], Handle]()
|
||||
for capture in endpoints.cachedCaptures.items:
|
||||
captureMap[capture] = publish(turn, observer, capture)
|
||||
endpoints.observers[observer] = captureMap
|
||||
|
||||
proc removeHandler*(index; res: Analysis; callback: HandlerCallback) =
|
||||
let continuation = index.root.extend(res.skeleton)
|
||||
try:
|
||||
let
|
||||
constValMap = continuation.leafMap[res.constPaths]
|
||||
leaf = constValMap[res.constVals]
|
||||
handler = leaf.handlerMap[res.capturePaths]
|
||||
handler.callbacks.excl(callback)
|
||||
if handler.callbacks.len == 0:
|
||||
leaf.handlerMap.del(res.capturePaths)
|
||||
if leaf.isEmpty:
|
||||
constValMap.del(res.constVals)
|
||||
proc remove*(index: var Index; turn: Turn; pattern: Pattern; observer: Cap) =
|
||||
let
|
||||
cont = index.root.extend(pattern)
|
||||
analysis = analyse pattern
|
||||
constValMap = cont.leafMap.getOrDefault(analysis.constPaths)
|
||||
if not constValMap.isNil:
|
||||
let leaf = constValMap.getOrDefault(analysis.constValues)
|
||||
if not leaf.isNil:
|
||||
let endpoints = leaf.observerGroups.getOrDefault(analysis.capturePaths)
|
||||
if not endpoints.isNil:
|
||||
var captureMap: TableRef[seq[Value], Handle]
|
||||
if endpoints.observers.pop(observer, captureMap):
|
||||
for handle in captureMap.values: retract(turn, handle)
|
||||
if endpoints.observers.len == 0:
|
||||
leaf.observerGroups.del(analysis.capturePaths)
|
||||
if leaf.observerGroups.len == 0:
|
||||
constValMap.del(analysis.constValues)
|
||||
if constValMap.len == 0:
|
||||
continuation.leafMap.del(res.constPaths)
|
||||
except KeyError: discard
|
||||
cont.leafMap.del(analysis.constPaths)
|
||||
|
||||
proc adjustAssertion*(index: var Index; outerValue: Value; delta: int): ChangeDescription =
|
||||
result = index.allAssertions.change(outerValue, delta)
|
||||
case result
|
||||
proc adjustAssertion(index: var Index; turn: Turn; outerValue: Value; delta: int): bool =
|
||||
case index.allAssertions.change(outerValue, delta)
|
||||
of cdAbsentToPresent:
|
||||
index.root.modify(
|
||||
addedEvent,
|
||||
outerValue,
|
||||
(proc (c: Continuation; v: Value) = c.cachedAssertions.incl(v)),
|
||||
(proc (l: Leaf; v: Value) = l.cachedAssertions.incl(v)),
|
||||
(proc (h: Handler; vs: seq[Value]) =
|
||||
if h.cachedCaptures.change(vs, +1) == cdAbsentToPresent:
|
||||
#debugEcho " assertion of ", outerValue
|
||||
for cb in h.callbacks: cb(addedEvent, vs)))
|
||||
result = true
|
||||
proc modContinuation(c: Continuation; v: Value) =
|
||||
c.cache.incl(v)
|
||||
proc modLeaf(l: Leaf; v: Value) =
|
||||
l.cache.incl(v)
|
||||
proc modObserver(turn: Turn; group: ObserverGroup; vs: seq[Value]) =
|
||||
let change = group.cachedCaptures.change(vs, +1)
|
||||
if change == cdAbsentToPresent:
|
||||
for (observer, captureMap) in group.observers.pairs:
|
||||
captureMap[vs] = publish(turn, observer, vs.toPreserves)
|
||||
# TODO: this handle is coming from the facet?
|
||||
modify(index.root, turn, outerValue, addedEvent, modContinuation, modLeaf, modObserver)
|
||||
of cdPresentToAbsent:
|
||||
index.root.modify(
|
||||
removedEvent,
|
||||
outerValue,
|
||||
(proc (c: Continuation; v: Value) = c.cachedAssertions.excl(v)),
|
||||
(proc (l: Leaf; v: Value) = l.cachedAssertions.excl(v)),
|
||||
(proc (h: Handler; vs: seq[Value]) =
|
||||
if h.cachedCaptures.change(vs, -1) == cdPresentToAbsent:
|
||||
#debugEcho "retraction of ", outerValue
|
||||
for cb in h.callbacks: cb(removedEvent, vs)))
|
||||
else:
|
||||
discard
|
||||
result = true
|
||||
proc modContinuation(c: Continuation; v: Value) =
|
||||
c.cache.excl(v)
|
||||
proc modLeaf(l: Leaf; v: Value) =
|
||||
l.cache.excl(v)
|
||||
proc modObserver(turn: Turn; group: ObserverGroup; vs: seq[Value]) =
|
||||
if group.cachedCaptures.change(vs, -1) == cdPresentToAbsent:
|
||||
for (observer, captureMap) in group.observers.pairs:
|
||||
var h: Handle
|
||||
if captureMap.take(vs, h):
|
||||
retract(observer.target, turn, h)
|
||||
modify(index.root, turn, outerValue, removedEvent, modContinuation, modLeaf, modObserver)
|
||||
else: discard
|
||||
|
||||
proc continuationNoop(c: Continuation; v: Value) = discard
|
||||
proc leafNoop(l: Leaf; v: Value) = discard
|
||||
|
||||
proc deliverMessage*(index; v: Value; leafCb: proc (l: Leaf; v: Value) {.gcsafe.}) =
|
||||
proc handlerCb(h: Handler; vs: seq[Value]) =
|
||||
for cb in h.callbacks: cb(messageEvent, vs)
|
||||
index.root.modify(messageEvent, v, continuationNoop, leafCb, handlerCb)
|
||||
proc add*(index: var Index; turn: Turn; v: Value): bool =
|
||||
adjustAssertion(index, turn, v, +1)
|
||||
proc remove*(index: var Index; turn: Turn; v: Value): bool =
|
||||
adjustAssertion(index, turn, v, -1)
|
||||
|
||||
proc deliverMessage*(index; v: Value) =
|
||||
proc handlerCb(h: Handler; vs: seq[Value]) =
|
||||
for cb in h.callbacks: cb(messageEvent, vs)
|
||||
index.root.modify(messageEvent, v, continuationNoop, leafNoop, handlerCb)
|
||||
proc deliverMessage*(index: var Index; turn: Turn; v: Value) =
|
||||
proc observersCb(turn: Turn; group: ObserverGroup; vs: seq[Value]) =
|
||||
for observer in group.observers.keys: message(turn, observer, vs)
|
||||
index.root.modify(turn, v, messageEvent, continuationNoop, leafNoop, observersCb)
|
||||
|
|
|
@ -1,34 +0,0 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2021 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import preserves
|
||||
import ../syndicate/protocols/schemas/sturdy, ./private/hmacs
|
||||
|
||||
proc mint*(key: openarray[byte]; oid: Preserve): SturdyRef =
|
||||
SturdyRef(oid: oid, sig: hmacSha256(key, encode(oid), key.len))
|
||||
|
||||
proc attenuate*(r: SturdyRef; caveats: Attenuation): SturdyRef =
|
||||
result = SturdyRef(
|
||||
oid: r.oid,
|
||||
caveatChain: r.caveatChain,
|
||||
sig: hmacSha256(r.sig, caveats.encode))
|
||||
result.caveatChain.add caveats
|
||||
|
||||
proc validate*(key: openarray[byte]; r: SturdyRef): bool =
|
||||
var sig = hmacSha256(key, r.oid.encode, key.len)
|
||||
for a in r.caveatChain:
|
||||
sig = hmacSha256(sig, a.encode)
|
||||
r.sig == sig
|
||||
|
||||
when isMainModule:
|
||||
import unittest
|
||||
import preserves/parse
|
||||
test "sturdy":
|
||||
var
|
||||
key: array[16, byte]
|
||||
oid = "syndicate".toPreserve
|
||||
sRef = mint(key, oid)
|
||||
control = parsePreserves"""<ref "syndicate" [] #[pkgN9TBmEd3Q04grVG4Zdw]>"""
|
||||
check(sRef.toPreserve == control)
|
||||
let aRef = attenuate(sRef, newSeq[Caveat]())
|
||||
check validate(key, aRef)
|
|
@ -1,12 +1,61 @@
|
|||
# Package
|
||||
# Emulate Nimble from CycloneDX data at sbom.json.
|
||||
|
||||
version = "0.0.0"
|
||||
author = "Emery Hemingway"
|
||||
description = "Syndicated actors for conversational concurrency"
|
||||
license = "Unlicense"
|
||||
srcDir = "src"
|
||||
import std/json
|
||||
|
||||
proc lookupComponent(sbom: JsonNode; bomRef: string): JsonNode =
|
||||
for c in sbom{"components"}.getElems.items:
|
||||
if c{"bom-ref"}.getStr == bomRef:
|
||||
return c
|
||||
result = newJNull()
|
||||
|
||||
# Dependencies
|
||||
let
|
||||
sbom = (getPkgDir() & "/sbom.json").readFile.parseJson
|
||||
comp = sbom{"metadata", "component"}
|
||||
bomRef = comp{"bom-ref"}.getStr
|
||||
|
||||
requires "nim >= 1.4.8", "nimSHA2 >= 0.1.1", "preserves >= 1.0.0"
|
||||
version = comp{"version"}.getStr
|
||||
author = comp{"authors"}[0]{"name"}.getStr
|
||||
description = comp{"description"}.getStr
|
||||
license = comp{"licenses"}[0]{"license", "id"}.getStr
|
||||
|
||||
for prop in comp{"properties"}.getElems.items:
|
||||
let (key, val) = (prop{"name"}.getStr, prop{"value"}.getStr)
|
||||
case key
|
||||
of "nim:skipDirs:":
|
||||
add(skipDirs, val)
|
||||
of "nim:skipFiles:":
|
||||
add(skipFiles, val)
|
||||
of "nim:skipExt":
|
||||
add(skipExt, val)
|
||||
of "nim:installDirs":
|
||||
add(installDirs, val)
|
||||
of "nim:installFiles":
|
||||
add(installFiles, val)
|
||||
of "nim:installExt":
|
||||
add(installExt, val)
|
||||
of "nim:binDir":
|
||||
add(binDir, val)
|
||||
of "nim:srcDir":
|
||||
add(srcDir, val)
|
||||
of "nim:backend":
|
||||
add(backend, val)
|
||||
else:
|
||||
if key.startsWith "nim:bin:":
|
||||
namedBin[key[8..key.high]] = val
|
||||
|
||||
for depend in sbom{"dependencies"}.items:
|
||||
if depend{"ref"}.getStr == bomRef:
|
||||
for depRef in depend{"dependsOn"}.items:
|
||||
let dep = sbom.lookupComponent(depRef.getStr)
|
||||
var spec = dep{"name"}.getStr
|
||||
for extRef in dep{"externalReferences"}.elems:
|
||||
if extRef{"type"}.getStr == "vcs":
|
||||
spec = extRef{"url"}.getStr
|
||||
break
|
||||
let ver = dep{"version"}.getStr
|
||||
if ver != "":
|
||||
if ver.allCharsInSet {'0'..'9', '.'}: spec.add " == "
|
||||
else: spec.add '#'
|
||||
spec.add ver
|
||||
requires spec
|
||||
break
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
include_rules
|
||||
: foreach *.prs |> !preserves_schema_nim |> | {schema}
|
||||
: foreach t*.nim | ../../preserves-nim/<tests> {schema} $(SYNDICATE_PROTOCOL) |> !nim_run |> | ../<test>
|
||||
: foreach solo5*.nim | ../../taps/<sources> ../../preserves-nim/<tests> {schema} $(SYNDICATE_PROTOCOL) |> !nim_solo5_spt |> | ../<test>
|
|
@ -1,20 +0,0 @@
|
|||
|
||||
## Date of generation: 2021-09-01 13:32
|
||||
import
|
||||
std/typetraits, preserves
|
||||
|
||||
type
|
||||
EmbeddedType = void
|
||||
BoxState* {.record: "box-state".} = object ## ``<box-state @value int>``
|
||||
`value`*: BiggestInt
|
||||
|
||||
SetBox* {.record: "set-box".} = object ## ``<set-box @value int>``
|
||||
`value`*: BiggestInt
|
||||
|
||||
proc prsBoxState*(value: Preserve | BiggestInt): Preserve =
|
||||
initRecord[EmbeddedType](symbol("box-state", EmbeddedType),
|
||||
toPreserve(value, EmbeddedType))
|
||||
|
||||
proc prsSetBox*(value: Preserve | BiggestInt): Preserve =
|
||||
initRecord[EmbeddedType](symbol("set-box", EmbeddedType),
|
||||
toPreserve(value, EmbeddedType))
|
|
@ -1,5 +0,0 @@
|
|||
version 1.
|
||||
|
||||
BoxState = <box-state @value int> .
|
||||
|
||||
SetBox = <set-box @value int> .
|
|
@ -1,50 +0,0 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2021 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[asyncdispatch, asyncfile, posix, random, strutils]
|
||||
import preserves
|
||||
import syndicate, syndicate/protocols/schemas/simpleChatProtocol, syndicate/sturdy
|
||||
|
||||
randomize()
|
||||
|
||||
syndicate chat:
|
||||
|
||||
let me = "user_" & $rand(range[10..1000])
|
||||
|
||||
spawn "debug":
|
||||
onAsserted(?s) do (s: Preserve):
|
||||
echo " asserted ", s
|
||||
onRetracted(?s) do (s: Preserve):
|
||||
echo " retracted ", s
|
||||
onMessage(?s) do (s: Preserve):
|
||||
echo " message ", s
|
||||
|
||||
spawn "log":
|
||||
during(present(?who)) do (who: string):
|
||||
echo who, " joined"
|
||||
onStop:
|
||||
echo who, " left"
|
||||
onMessage(says(?who, ?what)) do (who: string; what: string):
|
||||
echo who, " says ", what
|
||||
|
||||
spawn "chat":
|
||||
publish present(me)
|
||||
during (present(me)):
|
||||
let
|
||||
inputFacet = getCurrentFacet()
|
||||
af = newAsyncFile(AsyncFD STDIN_FILENO)
|
||||
inputFacet.beginExternalTask()
|
||||
proc readStdin() =
|
||||
readline(af).addCallback do (f: Future[string]):
|
||||
if f.failed:
|
||||
inputFacet.endExternalTask()
|
||||
else:
|
||||
callSoon:
|
||||
readStdin()
|
||||
let line = read f
|
||||
if line.len > 0:
|
||||
let a = says(me, strip line)
|
||||
send a
|
||||
readStdin()
|
||||
|
||||
waitFor chat()
|
|
@ -0,0 +1,39 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import taps
|
||||
import solo5
|
||||
import syndicate, syndicate/relays
|
||||
import preserves
|
||||
|
||||
acquireDevices([("relay", netBasic)], netAcquireHook)
|
||||
|
||||
type Netif {.preservesRecord: "netif"} = object
|
||||
device, ipAddr: string
|
||||
|
||||
proc spawnNetifActor(turn: Turn; ds: Cap) =
|
||||
spawnActor(turn, "netif") do (turn: Turn):
|
||||
let facet = turn.facet
|
||||
onInterfaceUp do (device: string; ip: IpAddress):
|
||||
run(facet) do (turn: Turn):
|
||||
if not ip.isLinkLocal:
|
||||
discard publish(turn, ds, Netif(device: device, ipAddr: $ip))
|
||||
|
||||
runActor("relay-test") do (turn: Turn):
|
||||
let root = turn.facet
|
||||
onStop(turn) do (turn: Turn):
|
||||
quit()
|
||||
let ds = newDataspace(turn)
|
||||
spawnNetifActor(turn, ds)
|
||||
spawnRelays(turn, ds)
|
||||
var
|
||||
route: Route
|
||||
pr = parsePreserves $solo5_start_info.cmdline
|
||||
if route.fromPreserves pr:
|
||||
echo "parsed route ", route.toPreserves
|
||||
during(turn, ds, Netif?:{1: grab()}) do (ip: string):
|
||||
echo "Acquired address ", ip
|
||||
resolve(turn, ds, route) do (turn: Turn; ds: Cap):
|
||||
echo "route resolved!"
|
||||
echo "stopping root facet"
|
||||
stop(turn, root)
|
|
@ -0,0 +1,3 @@
|
|||
define:ipv6Enabled
|
||||
define:traceSyndicate
|
||||
import:"std/assertions"
|
|
@ -0,0 +1,25 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/times
|
||||
import solo5
|
||||
import syndicate, syndicate/drivers/timers
|
||||
|
||||
acquireDevices()
|
||||
|
||||
runActor("timer-test") do (turn: Turn):
|
||||
let timers = newDataspace(turn)
|
||||
spawnTimerDriver(turn, timers)
|
||||
|
||||
onPublish(turn, timers, ?LaterThan(seconds: 1356100000)):
|
||||
echo "now in 13th bʼakʼtun"
|
||||
|
||||
after(turn, timers, initDuration(seconds = 3)) do (turn: Turn):
|
||||
echo "third timer expired"
|
||||
stopActor(turn)
|
||||
|
||||
after(turn, timers, initDuration(seconds = 1)) do (turn: Turn):
|
||||
echo "first timer expired"
|
||||
|
||||
after(turn, timers, initDuration(seconds = 2)) do (turn: Turn):
|
||||
echo "second timer expired"
|
|
@ -0,0 +1,2 @@
|
|||
define:ipv6Enabled
|
||||
import:"std/assertions"
|
|
@ -1,66 +0,0 @@
|
|||
# SPDX-FileCopyrightText: 2021 ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import syndicate/assertions, syndicate/dataspaces, syndicate/events, syndicate/skeletons
|
||||
import preserves, preserves/records
|
||||
import asyncdispatch, tables, options
|
||||
|
||||
import ./box_and_client
|
||||
|
||||
const N = 100000
|
||||
|
||||
let
|
||||
`?_` = Discard().toPreserve
|
||||
`?$` = Capture().toPreserve
|
||||
|
||||
proc boot(facet: Facet) =
|
||||
|
||||
facet.spawn("box") do (facet: Facet):
|
||||
facet.declareField(value, int, 0)
|
||||
|
||||
facet.addEndpoint do (facet: Facet) -> EndpointSpec:
|
||||
# echo "recomputing published BoxState ", facet.fields.value
|
||||
result.assertion = prsBoxState(value.getPreserve)
|
||||
|
||||
facet.addDataflow do (facet: Facet):
|
||||
# echo "box dataflow saw new value ", facet.fields.value
|
||||
if value.get == N:
|
||||
facet.stop do (facet: Facet):
|
||||
echo "terminated box root facet"
|
||||
|
||||
facet.addEndpoint do (facet: Facet) -> EndpointSpec:
|
||||
let a = prsSetBox(`?$`)
|
||||
result.analysis = some analyzeAssertion(a)
|
||||
proc cb(facet: Facet; vs: seq[Value]) =
|
||||
facet.scheduleScript do (facet: Facet):
|
||||
value.set(vs[0])
|
||||
# echo "box updated value ", vs[0]
|
||||
result.callback = facet.wrap(messageEvent, cb)
|
||||
result.assertion = observe(prsSetBox(`?$`))
|
||||
|
||||
facet.spawn("client") do (facet: Facet):
|
||||
|
||||
facet.addEndpoint do (facet: Facet) -> EndpointSpec:
|
||||
let a = prsBoxState(`?$`)
|
||||
result.analysis = some analyzeAssertion(a)
|
||||
proc cb(facet: Facet; vs: seq[Value]) =
|
||||
facet.scheduleScript do (facet: Facet):
|
||||
let v = prsSetBox(vs[0].int.succ.toPreserve)
|
||||
# echo "client sending ", v
|
||||
facet.send(v)
|
||||
result.callback = facet.wrap(addedEvent, cb)
|
||||
result.assertion = observe(prsBoxState(`?$`))
|
||||
|
||||
facet.addEndpoint do (facet: Facet) -> EndpointSpec:
|
||||
let a = prsBoxState(`?_`)
|
||||
result.analysis = some analyzeAssertion(a)
|
||||
proc cb(facet: Facet; vs: seq[Value]) =
|
||||
facet.scheduleScript do (facet: Facet):
|
||||
echo "box gone"
|
||||
result.callback = facet.wrap(removedEvent, cb)
|
||||
result.assertion = observe(prsBoxState(`?_`))
|
||||
|
||||
facet.actor.dataspace.ground.addStopHandler do (_: Dataspace):
|
||||
echo "stopping box-and-client"
|
||||
|
||||
waitFor bootModule("box-and-client", boot)
|
|
@ -0,0 +1,74 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[oserrors, parseopt, posix, strutils]
|
||||
import pkg/sys/[files, handles, ioqueue]
|
||||
import preserves, syndicate, syndicate/relays
|
||||
|
||||
type
|
||||
Present {.preservesRecord: "Present".} = object
|
||||
username: string
|
||||
Says {.preservesRecord: "Says".} = object
|
||||
who, what: string
|
||||
|
||||
proc syncAndStop(facet: Facet; cap: Cap) =
|
||||
## Stop the actor responsible for `facet` after
|
||||
## synchronizing with `cap`.
|
||||
run(facet) do (turn: Turn):
|
||||
sync(turn, cap, stopActor)
|
||||
|
||||
proc readStdin(facet: Facet; ds: Cap; username: string) {.asyncio.} =
|
||||
let
|
||||
fd = stdin.getOsFileHandle()
|
||||
flags = fcntl(fd.cint, F_GETFL, 0)
|
||||
if flags < 0:
|
||||
raiseOSError(osLastError())
|
||||
if fcntl(fd.cint, F_SETFL, flags or O_NONBLOCK) < 0:
|
||||
raiseOSError(osLastError())
|
||||
let
|
||||
file = newAsyncFile(FD fd)
|
||||
buf = new string
|
||||
buf[].setLen(0x1000)
|
||||
while true:
|
||||
let n = read(file, buf)
|
||||
if n < 1:
|
||||
stderr.writeLine "test_chat calls stopsActor ", facet.actor
|
||||
syncAndStop(facet, ds)
|
||||
return
|
||||
else:
|
||||
var msg = buf[][0..<n].strip
|
||||
proc send(turn: Turn) =
|
||||
message(turn, ds, Says(who: username, what: msg))
|
||||
run(facet, send)
|
||||
|
||||
proc chat(turn: Turn; ds: Cap; username: string) =
|
||||
during(turn, ds, ?:Present) do (who: string):
|
||||
echo who, " joined"
|
||||
do:
|
||||
echo who, " left"
|
||||
|
||||
onMessage(turn, ds, ?:Says) do (who: string, what: string):
|
||||
echo who, ": ", what
|
||||
|
||||
discard publish(turn, ds, Present(username: username))
|
||||
|
||||
discard trampoline:
|
||||
whelp readStdin(turn.facet, ds, username)
|
||||
|
||||
proc main =
|
||||
var username = ""
|
||||
|
||||
for kind, key, val in getopt():
|
||||
if kind == cmdLongOption:
|
||||
case key
|
||||
of "user", "username":
|
||||
username = val
|
||||
|
||||
if username == "":
|
||||
stderr.writeLine "--user: unspecified"
|
||||
else:
|
||||
runActor("chat") do (turn: Turn):
|
||||
resolveEnvironment(turn) do (turn: Turn; ds: Cap):
|
||||
chat(turn, ds, username)
|
||||
|
||||
main()
|
|
@ -1,35 +0,0 @@
|
|||
# SPDX-FileCopyrightText: ☭ 2021 Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/asyncdispatch
|
||||
import preserves
|
||||
import syndicate
|
||||
|
||||
import ./box_and_client
|
||||
|
||||
syndicate testDsl:
|
||||
|
||||
spawn "box":
|
||||
field(currentValue, BiggestInt, 0)
|
||||
publish prsBoxState(currentValue.get)
|
||||
stopIf currentValue.get == 10:
|
||||
echo "box: terminating"
|
||||
onMessage(prsSetBox(?newValue)) do (newValue: int):
|
||||
# The SetBox message is unpacked to `newValue: int`
|
||||
echo "box: taking on new value ", newValue
|
||||
currentValue.set(newValue)
|
||||
|
||||
spawn "client":
|
||||
#stopIf retracted(observe(SetBox, _)):
|
||||
# echo "client: box has gone"
|
||||
onAsserted(prsBoxState(?v)) do (v: BiggestInt):
|
||||
echo "client: learned that box's value is now ", v
|
||||
send(prsSetBox(v.succ))
|
||||
onRetracted(prsBoxState(?_)) do (_):
|
||||
echo "client: box state disappeared"
|
||||
onStop:
|
||||
quit(0) # Quit explicitly rather than let the dispatcher run empty.
|
||||
|
||||
runForever()
|
||||
# The dataspace is driven by the async dispatcher.
|
||||
# Without `runForever` this module would exit immediately.
|
|
@ -0,0 +1,87 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[options, sequtils, tables, unittest]
|
||||
|
||||
import preserves, syndicate, syndicate/protocols/[gatekeeper, timer]
|
||||
|
||||
import ./test_schema
|
||||
|
||||
suite "example":
|
||||
var pat: Pattern
|
||||
check pat.fromPreserves parsePreserves"""
|
||||
<group <arr> {
|
||||
0: <lit 1>
|
||||
1: <bind <group <arr> {
|
||||
0: <bind <_>>
|
||||
1: <_>
|
||||
}>>
|
||||
2: <_>
|
||||
}>
|
||||
"""
|
||||
|
||||
const A = "[1 2 3]"
|
||||
test A:
|
||||
let v = parsePreserves A
|
||||
check:
|
||||
not pat.matches(v)
|
||||
|
||||
const B = "[1 [2 3] 4]"
|
||||
test B:
|
||||
let
|
||||
v = parsePreserves B
|
||||
c = parsePreserves "[[2 3] 2]"
|
||||
check pat.matches(v)
|
||||
check pat.capture(v).toPreserves == c
|
||||
|
||||
const C = "[1 [2] 5]"
|
||||
test C:
|
||||
let v = parsePreserves C
|
||||
check:
|
||||
not pat.matches(v)
|
||||
|
||||
const D = "[1 [2 3 4] 5]"
|
||||
test D:
|
||||
let
|
||||
v = parsePreserves D
|
||||
c = parsePreserves "[[2 3 4] 2]"
|
||||
check pat.matches(v)
|
||||
check pat.capture(v).toPreserves == c
|
||||
|
||||
const E = "[1 [<x> <y>] []]"
|
||||
test E:
|
||||
let
|
||||
v = parsePreserves E
|
||||
c = parsePreserves "[[<x> <y>] <x>]"
|
||||
check pat.matches(v)
|
||||
check pat.capture(v).toPreserves == c
|
||||
|
||||
suite "meta":
|
||||
|
||||
test "pattern-of-pattern":
|
||||
let
|
||||
pat = grabRecord("foo".toSymbol, {666: drop()})
|
||||
meta = pat.toPreserves.drop()
|
||||
check $meta == "<group <rec group> {0: <group <rec rec> {0: <lit foo>}> 1: <group <dict> {666: <_>}>}>"
|
||||
|
||||
test "observe":
|
||||
let
|
||||
val = Observe(pattern: LaterThan ?: {0: drop 12.24}).toPreserves
|
||||
pat = grab(val)
|
||||
check pat.matches(val)
|
||||
check pat.capture(val) == @[val]
|
||||
let
|
||||
meta = observePattern(!LaterThan, {@[0.toPreserves]: grabLit()})
|
||||
res = parsePreserves "[12.24]"
|
||||
check meta.matches(val)
|
||||
check meta.capture(val).toPreserves == res
|
||||
|
||||
test "connect-transport":
|
||||
let pat = parsePreserves"""
|
||||
<group <rec connect-transport> {0: <group <rec unix> {0: <lit "/run/user/1000/dataspace">}> 2: <group <rec accepted> {0: <bind <_>>}>}>
|
||||
""".preservesTo(Pattern).get
|
||||
let val = parsePreserves"""
|
||||
<connect-transport <unix "/run/user/1000/dataspace"> #:#f <accepted #:#f>>
|
||||
"""
|
||||
check pat.matches(val)
|
||||
check pat.capture(val).toPreserves == parsePreserves "[#:#f]"
|
|
@ -0,0 +1,29 @@
|
|||
import std/[streams, strutils, unittest]
|
||||
|
||||
import preserves
|
||||
import syndicate/relays
|
||||
import syndicate/protocols/sturdy
|
||||
|
||||
type WireRef = sturdy.WireRef
|
||||
|
||||
suite "protocols":
|
||||
test "PDiscard":
|
||||
var pd: PDiscard
|
||||
check $pd == "<_>"
|
||||
|
||||
test "stuff":
|
||||
const
|
||||
data = parseHexStr "b5b590b4b306617373657274b4b3074f627365727665b4b303726563b3067274742d6d73b5b4b3036c697483406805604189374c84b4b3036c6974b3062e3134342e3184b4b30462696e64b4b3015f8484b4b30462696e64b4b3015f8484b4b30462696e64b4b3015f8484b4b30462696e64b4b3015f8484848486b590908484a2132403848484b5b590b4b30772657472616374a2132403848484"
|
||||
var
|
||||
str = newStringStream(data)
|
||||
while not str.atEnd:
|
||||
var pos = str.getPosition
|
||||
echo "decode position: ", pos
|
||||
try:
|
||||
var a = decodePreserves(str)
|
||||
echo a
|
||||
except CatchableError:
|
||||
str.setPosition pos
|
||||
echo str.readAll.toHex
|
||||
break
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
|
||||
import
|
||||
preserves
|
||||
|
||||
type
|
||||
Foo* {.preservesRecord: "foo".} = object
|
||||
`x`*: seq[string]
|
||||
`y`*: BiggestInt
|
||||
`z`*: BiggestInt
|
||||
|
||||
proc `$`*(x: Foo): string =
|
||||
`$`(toPreserves(x))
|
||||
|
||||
proc encode*(x: Foo): seq[byte] =
|
||||
encode(toPreserves(x))
|
|
@ -0,0 +1,2 @@
|
|||
version 1 .
|
||||
Foo = <foo @x [string ...] @y int @z int> .
|
|
@ -1,27 +0,0 @@
|
|||
# SPDX-FileCopyrightText: 2021 ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/[asyncdispatch, monotimes, times]
|
||||
import preserves, preserves/records
|
||||
import syndicate
|
||||
|
||||
import syndicate/drivers/timers
|
||||
|
||||
syndicate plainTimerDemo:
|
||||
boot timerDriver
|
||||
|
||||
spawn "laterThanDemo":
|
||||
field(deadline, MonoTime, getMonoTime())
|
||||
field(count, int, 0)
|
||||
|
||||
onAsserted(prsTimeLaterThan(deadline.get)) do ():
|
||||
echo "TimeLaterThan ticked for deadline ", deadline.get
|
||||
count.set(count.get.succ)
|
||||
if count.get < 5:
|
||||
deadline.set(getMonoTime() + initDuration(milliseconds = 500))
|
||||
|
||||
onStop:
|
||||
echo "dataspace stopped"
|
||||
quit(0)
|
||||
|
||||
runForever()
|
|
@ -0,0 +1,32 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
import std/times
|
||||
import syndicate, syndicate/drivers/timers, preserves
|
||||
|
||||
var passCount = 0
|
||||
|
||||
runActor("timer-test") do (turn: Turn):
|
||||
let timers = newDataspace(turn)
|
||||
spawnTimerDriver(turn, timers)
|
||||
|
||||
onPublish(turn, timers, ?LaterThan(seconds: 1356100000)):
|
||||
echo "now in 13th bʼakʼtun"
|
||||
inc passCount
|
||||
|
||||
after(turn, timers, initDuration(seconds = 3)) do (turn: Turn):
|
||||
echo "third timer expired"
|
||||
assert passCount == 3
|
||||
inc passCount
|
||||
|
||||
after(turn, timers, initDuration(seconds = 1)) do (turn: Turn):
|
||||
echo "first timer expired"
|
||||
assert passCount == 1
|
||||
inc passCount
|
||||
|
||||
after(turn, timers, initDuration(seconds = 2)) do (turn: Turn):
|
||||
echo "second timer expired"
|
||||
assert passCount == 2
|
||||
inc passCount
|
||||
|
||||
doAssert passCount == 4, $passCount
|
Loading…
Reference in New Issue