Add syndesizer and a basic webhook endpoint
This commit is contained in:
parent
f64ffaf188
commit
92dbdbe438
|
@ -0,0 +1,12 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
## Syndicate multitool.
|
||||
|
||||
import syndicate, syndicate/relays
|
||||
|
||||
import ./syndesizer/webhooks
|
||||
|
||||
runActor("syndesizer") do (turn: var Turn; root: Cap):
  # Bridge this actor's dataspace over stdin/stdout so the parent
  # process (e.g. a Syndicate server) can supervise and drive it.
  connectStdio(turn, root)
  # Boot the webhook relay; the returned Actor handle is not used here.
  discard bootWebhookActor(turn, root)
|
|
@ -0,0 +1,87 @@
|
|||
# SPDX-FileCopyrightText: ☭ Emery Hemingway
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
## An actor for relaying Webhooks.
|
||||
|
||||
import std/[asyncdispatch, asynchttpserver, net, sets, strutils, tables, uri]
|
||||
|
||||
import preserves
|
||||
import syndicate, syndicate/[bags, relays]
|
||||
import syndicate/protocols/[http, transportAddress]
|
||||
|
||||
type
  Endpoints = Table[seq[string], Cap]
    ## Map from a split URL path (see `splitPath`) to the capability
    ## registered to receive requests at that path.

  WebhookArgs {.preservesDictionary.} = object
    ## Arguments of one webhook listener.
    ## NOTE(review): field names and order form the Preserves
    ## dictionary schema on the wire — do not rename or reorder.
    endpoints: Assertion
      ## dataspace value describing endpoint registrations
      ## (matched as `Endpoints` in `bootWebhookActor`)
    listen: Tcp
      ## TCP address (host, port) to bind the HTTP server to

  BootArgs {.preservesDictionary.} = object
    ## Top-level boot assertion observed by `bootWebhookActor`.
    webhook: WebhookArgs
|
||||
|
||||
func splitPath(s: string): seq[string] =
  ## Break a URL path into its components, ignoring leading
  ## and trailing slashes. An empty or all-slash path yields
  ## a single empty component.
  let trimmed = strip(s, chars = {'/'})
  result = split(trimmed, '/')
|
||||
|
||||
proc toRecord(req: Request; seqnum: BiggestInt; path: seq[string]): HttpRequest =
  ## Convert a request value from the std/asynchttpserver module
  ## to a request type from syndicate/protocols/http.
  result.sequenceNumber = seqnum
  result.host = req.hostname
  result.`method` = Symbol($req.reqMethod)
  result.path = path
  # Copy every header, keyed by symbol.
  for name, value in pairs(req.headers):
    result.headers[Symbol name] = value
  # Each query parameter becomes a single-element list of string values.
  for name, value in decodeQuery(req.url.query):
    result.query[Symbol name] =
      @[QueryValue(orKind: QueryValueKind.string, string: value)]
  # An empty body maps to the `absent` variant of the protocol type.
  result.body =
    if req.body == "":
      RequestBody(orKind: RequestBodyKind.absent)
    else:
      RequestBody(
        orKind: RequestBodyKind.present,
        present: cast[seq[byte]](req.body))
|
||||
|
||||
proc bootWebhookActor*(turn: var Turn; root: Cap): Actor =
  ## Spawn an actor that serves HTTP and relays each incoming request,
  ## converted via `toRecord`, as a message to every capability
  ## registered for the request's path.
  spawn("webhooks", turn) do (turn: var Turn):
    # For every asserted BootArgs start one HTTP listener. Fields 1 and 2
    # of the injected pattern grab the host and port of `listen`.
    during(turn, root, inject(!BootArgs, {1: grab(), 2: grab()})) do (host: string; port: Port):
      var seqNum: BiggestInt  # per-listener request sequence counter
      let facet = turn.facet  # captured so the async handler can re-enter the actor
      let endpoints = newTable[seq[string], Bag[Cap]]()
      # use a bag so the same capability registered multiple
      # times with the same path does not get duplicate messages

      proc cb(req: Request): Future[void] =
        ## HTTP handler: respond immediately, then relay the request
        ## into the dataspace on the captured facet.
        inc(seqNum)
        let path = req.url.path.splitPath
        if not endpoints.hasKey path:
          result = respond(req, Http404,
            "no capabilities registered at $1\n" % [req.url.path])
        else:
          # Acknowledge before relaying; delivery is fire-and-forget.
          result = respond(req, Http200, "")
          run(facet) do (turn: var Turn):
            var rec = req.toRecord(seqNum, path)
            for cap in endpoints[rec.path]:
              message(turn, cap, rec)

      let server = newAsyncHttpServer()
      if host.isIpAddress:
        # Literal IP address: bind exactly the requested family.
        var ip = parseIpAddress host
        case ip.family
        of IPv6:
          asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
        of IPv4:
          asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))
      else:
        # Hostname: listen on both IPv6 and IPv4.
        asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET6))
        asyncCheck(turn, server.serve(port, cb, host, domain = AF_INET))

      # Track endpoint registrations for this listener: field 0 grabs the
      # endpoints dictionary; host and port must match this listener.
      during(turn, root, inject(!BootArgs, {0: grab(), 1: ?host, 2: ?port})) do (eps: Endpoints):
        for path, cap in eps:
          if not endpoints.hasKey path:
            endpoints[path] = Bag[Cap]()
          discard endpoints[path].change(cap, +1)
      do:
        # Registration retracted: decrement counts in each bag.
        # NOTE(review): empty bags are never removed from `endpoints`,
        # so a fully-retracted path still answers 200 with no
        # recipients — confirm this is intended.
        for path, cap in eps:
          discard endpoints[path].change(cap, -1)

    do:
      # Listener assertion retracted: stop serving.
      close(server)

# TODO: JSON payloads
|
|
@ -1,6 +1,6 @@
|
|||
# Package
|
||||
|
||||
version = "20231130"
|
||||
version = "20231214"
|
||||
author = "Emery Hemingway"
|
||||
description = "Utilities for Syndicated Actors and Synit"
|
||||
license = "unlicense"
|
||||
|
|
Loading…
Reference in New Issue