Initial port to typescript/es6/babel-free. Far from working or complete

parent bdc3dcc1d5
commit 58e190e419

205 changed files with 2365 additions and 14329 deletions

2  .gitignore (vendored)

@@ -9,3 +9,5 @@ scratch/
*.did
.do_built
.do_built.dir/

tsconfig.tsbuildinfo

15  Makefile (new file)

@@ -0,0 +1,15 @@
bootstrap: node_modules/lerna

node_modules/lerna:
	npm i .
	$(MAKE) clean
	+$(MAKE) -j$$(nproc) all

PACKAGE_JSONS=$(wildcard packages/*/package.json)
PACKAGE_DIRS=$(PACKAGE_JSONS:/package.json=)

all clean veryclean:
	+for d in $(PACKAGE_DIRS); do make -C $$d $@ & done; wait

watch:
	inotifytest make -j$$(nproc) all

1  all.do (deleted)

@@ -1 +0,0 @@
(for p in packages/*/; do echo $p/all; done | xargs redo-ifchange) && (echo Done. >&2)

@@ -1,4 +0,0 @@
#!/bin/sh
[ -d node_modules/lerna ] || npm i .
redo clean
redo -j$(nproc) all

1  clean.do (deleted)

@@ -1 +0,0 @@
for p in packages/*/; do echo $p/clean; done | xargs redo

368  do (deleted)

@@ -1,368 +0,0 @@
#!/bin/sh
#
# A minimal alternative to djb redo that doesn't support incremental builds.
# For the full version, visit http://github.com/apenwarr/redo
#
# The author disclaims copyright to this source file and hereby places it in
# the public domain. (2010 12 14; updated 2018 10 31)
#
USAGE="
usage: $0 [-d] [-x] [-v] [-c] <targets...>
-d print extra debug messages (mostly about dependency checks)
-v run .do files with 'set -v'
-x run .do files with 'set -x'
-c clean up all old targets before starting

Note: $0 is an implementation of redo that does *not* check dependencies.
It will never rebuild a target it has already built, unless you use -c.
"

# By default, no output coloring.
green=""
bold=""
plain=""

if [ -n "$TERM" -a "$TERM" != "dumb" ] && tty <&2 >/dev/null 2>&1; then
    green="$(printf '\033[32m')"
    bold="$(printf '\033[1m')"
    plain="$(printf '\033[m')"
fi

# Split $1 into a dir part ($_dirsplit_dir) and base filename ($_dirsplit_base)
_dirsplit()
{
    _dirsplit_base=${1##*/}
    _dirsplit_dir=${1%$_dirsplit_base}
}

# Like /usr/bin/dirname, but avoids a fork and uses _dirsplit semantics.
dirname()
(
    _dirsplit "$1"
    dir=${_dirsplit_dir%/}
    echo "${dir:-.}"
)

_dirsplit "$0"
export REDO=$(cd "${_dirsplit_dir:-.}" && echo "$PWD/$_dirsplit_base")
_cmd=$_dirsplit_base

DO_TOP=
if [ -z "$DO_BUILT" ]; then
    export _do_opt_debug=
    export _do_opt_exec=
    export _do_opt_verbose=
    export _do_opt_clean=
fi
while getopts 'dxvch?' _opt; do
    case $_opt in
        d) _do_opt_debug=1 ;;
        x) _do_opt_exec=x ;;
        v) _do_opt_verbose=v ;;
        c) _do_opt_clean=1 ;;
        \?|h|*) printf "%s" "$USAGE" >&2
            exit 99
            ;;
    esac
done
shift "$((OPTIND - 1))"
_debug() {
    [ -z "$_do_opt_debug" ] || echo "$@" >&2
}

if [ -z "$DO_BUILT" -a "$_cmd" != "redo-whichdo" ]; then
    DO_TOP=1
    if [ "$#" -eq 0 ] && [ "$_cmd" = "do" -o "$_cmd" = "redo" ]; then
        set all # only toplevel redo has a default target
    fi
    export DO_BUILT=$PWD/.do_built
    : >>"$DO_BUILT"
    sort -u "$DO_BUILT" >"$DO_BUILT.new"
    echo "Cleaning up from previous run..." >&2
    while read f; do
        [ -n "$_do_opt_clean" ] && printf "%s\0%s.did\0" "$f" "$f"
        printf "%s.did.tmp\0" "$f"
    done <"$DO_BUILT.new" |
    xargs -0 rm -f 2>/dev/null
    mv "$DO_BUILT.new" "$DO_BUILT"
    DO_PATH=$DO_BUILT.dir
    export PATH=$DO_PATH:$PATH
    rm -rf "$DO_PATH"
    mkdir "$DO_PATH"
    for d in redo redo-ifchange redo-whichdo; do
        ln -s "$REDO" "$DO_PATH/$d"
    done
    [ -e /bin/true ] && TRUE=/bin/true || TRUE=/usr/bin/true
    for d in redo-ifcreate redo-stamp redo-always redo-ood \
        redo-targets redo-sources; do
        ln -s $TRUE "$DO_PATH/$d"
    done
fi


# Chop the "file" part off a /path/to/file pathname.
# Note that if the filename already ends in a /, we just remove the slash.
_updir()
{
    local v="${1%/*}"
    [ "$v" != "$1" ] && echo "$v"
    # else "empty" which means we went past the root
}


# Returns true if $1 starts with $2.
_startswith()
{
    [ "${1#"$2"}" != "$1" ]
}


# Returns true if $1 ends with $2.
_endswith()
{
    [ "${1%"$2"}" != "$1" ]
}


# Prints $1 as a path relative to $PWD (not starting with /).
# If it already doesn't start with a /, doesn't change the string.
_relpath()
{
    local here="$(command pwd)" there="$1" out= hadslash=
    #echo "RP start '$there' hs='$hadslash'" >&2
    _startswith "$there" "/" || { echo "$there" && return; }
    [ "$there" != "/" ] && _endswith "$there" "/" && hadslash=/
    here=${here%/}/
    while [ -n "$here" ]; do
        #echo "RP out='$out' here='$here' there='$there'" >&2
        [ "${here%/}" = "${there%/}" ] && there= && break;
        [ "${there#$here}" != "$there" ] && break
        out=../$out
        _dirsplit "${here%/}"
        here=$_dirsplit_dir
    done
    there=${there#$here}
    if [ -n "$there" ]; then
        echo "$out${there%/}$hadslash"
    else
        echo "${out%/}$hadslash"
    fi
}


# Prints a "normalized relative" path, with ".." resolved where possible.
# For example, a/b/../c will be reduced to just a/c.
_normpath()
(
    local path="$1" out= isabs=
    #echo "NP start '$path'" >&2
    if _startswith "$path" "/"; then
        isabs=1
    else
        path="${PWD%/}/$path"
    fi
    set -f
    IFS=/
    for d in $path; do
        #echo "NP out='$out' d='$d'" >&2
        if [ "$d" = ".." ]; then
            out=$(_updir "${out%/}")/
        else
            out=$out$d/
        fi
    done
    #echo "NP out='$out' (done)" >&2
    out=${out%/}
    if [ -n "$isabs" ]; then
        echo "${out:-/}"
    else
        _relpath "${out:-/}"
    fi
)


# List the possible names for default*.do files in dir $1 matching the target
# pattern in $2. We stop searching when we find the first one that exists.
_find_dofiles_pwd()
{
    local dodir="$1" dofile="$2"
    _startswith "$dofile" "default." || dofile=${dofile#*.}
    while :; do
        dofile=default.${dofile#default.*.}
        echo "$dodir$dofile"
        [ -e "$dodir$dofile" ] && return 0
        [ "$dofile" = default.do ] && break
    done
    return 1
}


# List the possible names for default*.do files in $PWD matching the target
# pattern in $1. We stop searching when we find the first name that works.
# If there are no matches in $PWD, we'll search in .., and so on, to the root.
_find_dofiles()
{
    local target="$1" dodir= dofile= newdir=
    _debug "find_dofile: '$PWD' '$target'"
    dofile="$target.do"
    echo "$dofile"
    [ -e "$dofile" ] && return 0

    # Try default.*.do files, walking up the tree
    _dirsplit "$dofile"
    dodir=$_dirsplit_dir
    dofile=$_dirsplit_base
    [ -n "$dodir" ] && dodir=${dodir%/}/
    [ -e "$dodir$dofile" ] && return 0
    for i in $(seq 100); do
        [ -n "$dodir" ] && dodir=${dodir%/}/
        #echo "_find_dofiles: '$dodir' '$dofile'" >&2
        _find_dofiles_pwd "$dodir" "$dofile" && return 0
        newdir=$(_normpath "${dodir}..")
        [ "$newdir" = "$dodir" ] && break
        dodir=$newdir
    done
    return 1
}


# Print the last .do file returned by _find_dofiles.
# If that file exists, returns 0, else 1.
_find_dofile()
{
    local files="$(_find_dofiles "$1")"
    rv=$?
    #echo "files='$files'" >&2
    [ "$rv" -ne 0 ] && return $rv
    echo "$files" | {
        while read -r linex; do line=$linex; done
        printf "%s\n" "$line"
    }
}


# Actually run the given $dofile with the arguments in $@.
# Note: you should always run this in a subshell.
_run_dofile()
{
    export DO_DEPTH="$DO_DEPTH "
    export REDO_TARGET="$PWD/$target"
    local line1
    set -e
    read line1 <"$PWD/$dofile" || true
    cmd=${line1#"#!/"}
    if [ "$cmd" != "$line1" ]; then
        set -$_do_opt_verbose$_do_opt_exec
        exec /$cmd "$PWD/$dofile" "$@" >"$tmp.tmp2"
    else
        set -$_do_opt_verbose$_do_opt_exec
        :; . "$PWD/$dofile" >"$tmp.tmp2"
    fi
}


# Find and run the right .do file, starting in dir $1, for target $2, using
# filename $3 as the temporary output file. Renames the temp file to $2 when
# done.
_do()
{
    local dir="$1" target="$2" tmp="$3" dopath= dodir= dofile= ext=
    if [ "$_cmd" = "redo" ] ||
        ( [ ! -e "$target" -o -d "$target" ] &&
          [ ! -e "$target.did" ] ); then
        printf '%sdo %s%s%s%s\n' \
            "$green" "$DO_DEPTH" "$bold" "$dir$target" "$plain" >&2
        dopath=$(_find_dofile "$target")
        if [ ! -e "$dopath" ]; then
            echo "do: $target: no .do file ($PWD)" >&2
            return 1
        fi
        _dirsplit "$dopath"
        dodir=$_dirsplit_dir dofile=$_dirsplit_base
        if _startswith "$dofile" "default."; then
            ext=${dofile#default}
            ext=${ext%.do}
        else
            ext=
        fi
        target=$PWD/$target
        tmp=$PWD/$tmp
        cd "$dodir" || return 99
        target=$(_relpath "$target") || return 98
        tmp=$(_relpath "$tmp") || return 97
        base=${target%$ext}
        [ ! -e "$DO_BUILT" ] || [ ! -d "$(dirname "$target")" ] ||
            : >>"$target.did.tmp"
        ( _run_dofile "$target" "$base" "$tmp.tmp" )
        rv=$?
        if [ $rv != 0 ]; then
            printf "do: %s%s\n" "$DO_DEPTH" \
                "$dir$target: got exit code $rv" >&2
            rm -f "$tmp.tmp" "$tmp.tmp2" "$target.did"
            return $rv
        fi
        echo "$PWD/$target" >>"$DO_BUILT"
        mv "$tmp.tmp" "$target" 2>/dev/null ||
            ! test -s "$tmp.tmp2" ||
            mv "$tmp.tmp2" "$target" 2>/dev/null
        [ -e "$target.did.tmp" ] &&
            mv "$target.did.tmp" "$target.did" ||
            : >>"$target.did"
        rm -f "$tmp.tmp2"
    else
        _debug "do $DO_DEPTH$target exists." >&2
    fi
}


# Make corrections for directories that don't actually exist yet.
_dir_shovel()
{
    local dir base
    xdir=$1 xbase=$2 xbasetmp=$2
    while [ ! -d "$xdir" -a -n "$xdir" ]; do
        _dirsplit "${xdir%/}"
        xbasetmp=${_dirsplit_base}__$xbasetmp
        xdir=$_dirsplit_dir xbase=$_dirsplit_base/$xbase
    done
    _debug "xbasetmp='$xbasetmp'" >&2
}


# Implementation of the "redo" command.
_redo()
{
    set +e
    for i in "$@"; do
        _dirsplit "$i"
        _dir_shovel "$_dirsplit_dir" "$_dirsplit_base"
        dir=$xdir base=$xbase basetmp=$xbasetmp
        ( cd "$dir" && _do "$dir" "$base" "$basetmp" )
        [ "$?" = 0 ] || return 1
    done
}


# Implementation of the "redo-whichdo" command.
_whichdo()
{
    _find_dofiles "$1"
}


case $_cmd in
    do|redo|redo-ifchange) _redo "$@" ;;
    redo-whichdo) _whichdo "$1" ;;
    do.test) ;;
    *) printf "$0: '%s': unexpected redo command" "$_cmd" >&2; exit 99 ;;
esac
[ "$?" = 0 ] || exit 1

if [ -n "$DO_TOP" ]; then
    if [ -n "$_do_opt_clean" ]; then
        echo "Removing stamp files..." >&2
        [ ! -e "$DO_BUILT" ] ||
        while read f; do printf "%s.did\0" "$f"; done <"$DO_BUILT" |
        xargs -0 rm -f 2>/dev/null
    fi
fi

11135  package-lock.json (generated)

File diff suppressed because it is too large.

35  package.json

@@ -2,34 +2,21 @@
  "name": "@syndicate-lang/root",
  "private": true,
  "devDependencies": {
    "@babel/core": "^7.11.6",
    "@babel/plugin-syntax-jsx": "^7.10.4",
    "@babel/plugin-transform-react-jsx": "^7.10.4",
    "@babel/preset-env": "^7.11.5",
    "@rollup/plugin-commonjs": "^14.0.0",
    "@rollup/plugin-json": "^4.1.0",
    "@rollup/plugin-node-resolve": "^8.4.0",
    "@rollup/plugin-node-resolve": "^11.0.1",
    "@types/jest": "^26.0.19",
    "@types/node": "^14.14.20",
    "esm": "^3.2.25",
    "jest": "^26.6.3",
    "lerna": "^3.22.1",
    "mocha": "^7.2.0",
    "nyc": "^14.1.1",
    "rollup": "^2.23.0"
    "rollup": "^2.36.1",
    "rollup-plugin-terser": "^7.0.2",
    "ts-jest": "^26.4.4",
    "ts-node": "^9.1.1",
    "ts-node-dev": "^1.1.1",
    "typescript": "^4.1.3"
  },
  "dependencies": {
    "@syndicate-lang/core": "file:packages/core",
    "@syndicate-lang/create": "file:packages/create",
    "@syndicate-lang/driver-browser-ui": "file:packages/driver-browser-ui",
    "@syndicate-lang/driver-http-node": "file:packages/driver-http-node",
    "@syndicate-lang/driver-mdns": "file:packages/driver-mdns",
    "@syndicate-lang/driver-streams-node": "file:packages/driver-streams-node",
    "@syndicate-lang/driver-timer": "file:packages/driver-timer",
    "@syndicate-lang/driver-udp-node": "file:packages/driver-udp-node",
    "@syndicate-lang/driver-websocket": "file:packages/driver-websocket",
    "@syndicate-lang/flappy-bird-demo": "file:packages/flappy-bird-demo",
    "@syndicate-lang/server": "file:packages/server",
    "@syndicate-lang/socks": "file:packages/socks",
    "@syndicate-lang/syntax": "file:packages/syntax",
    "@syndicate-lang/syntax-playground": "file:packages/syntax-playground",
    "@syndicate-lang/syntax-server": "file:packages/syntax-server"
    "@syndicate-lang/core": "file:packages/core"
  }
}

8  packages/core/Makefile (new file)

@@ -0,0 +1,8 @@
all:
	npm run prepare

clean:
	rm -rf lib dist .nyc_output coverage tsconfig.tsbuildinfo

veryclean: clean
	rm -rf node_modules package-lock.json

16  packages/core/dist-link.js (new file)

@@ -0,0 +1,16 @@
import * as fs from 'fs';

const pkg = JSON.parse(fs.readFileSync('./package.json', 'utf-8'));

for (let f of fs.readdirSync('dist')) {
    const prefix = `syndicate-${pkg.version}`;
    if (f.startsWith(prefix)) {
        const linkname = `dist/syndicate${f.substring(prefix.length)}`;
        try {
            fs.unlinkSync(linkname);
        } catch (e) {
            if (e.code !== 'ENOENT') throw e;
        }
        fs.symlinkSync(f, linkname);
    }
}
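
The script above gives the rollup output a version-independent alias: for every dist/syndicate-<version>.* file it (re)creates a matching dist/syndicate.* symlink. A small illustration of the name rewriting; the version and bundle name here are hypothetical, not taken from this commit:

// Illustration only: how the prefix stripping in dist-link.js maps names.
const version = '0.0.1';                       // hypothetical pkg.version
const f = `syndicate-${version}.min.js`;       // hypothetical rollup output in dist/
const prefix = `syndicate-${version}`;
const linkname = `dist/syndicate${f.substring(prefix.length)}`;
console.log(linkname);                         // prints "dist/syndicate.min.js"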

125  packages/core/examples/box-and-client.js (Normal file → Executable file)

@@ -1,7 +1,7 @@
"use strict";
#!/usr/bin/env -S node --es-module-specifier-resolution=node
//---------------------------------------------------------------------------
// @syndicate-lang/core, an implementation of Syndicate dataspaces for JS.
// Copyright (C) 2016-2018 Tony Garnock-Jones <tonyg@leastfixedpoint.com>
// Copyright (C) 2016-2021 Tony Garnock-Jones <tonyg@leastfixedpoint.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by

@@ -17,14 +17,9 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//---------------------------------------------------------------------------

const Immutable = require('immutable');
const Syndicate = require('../src/index.js');
const Skeleton = Syndicate.Skeleton;
const Dataspace = Syndicate.Dataspace;
const Ground = Syndicate.Ground;
const Record = Syndicate.Record;
const __ = Syndicate.Discard._instance;
const _$ = Syndicate.Capture(__);
import { Dataspace, Skeleton, Ground, Record, Discard, Capture, Observe } from '../lib/index';
const __ = Discard._instance;
const _$ = Capture(__);

const BoxState = Record.makeConstructor('BoxState', ['value']);
const SetBox = Record.makeConstructor('SetBox', ['newValue']);

@@ -32,70 +27,58 @@ const SetBox = Record.makeConstructor('SetBox', ['newValue']);
const N = 100000;

console.time('box-and-client-' + N.toString());
let _savedGlobalFacet = Dataspace._currentFacet;
Dataspace._currentFacet = new Syndicate._Dataspace.ActionCollector();

Dataspace.spawn('box', function () {
Dataspace.declareField(this, 'value', 0);
Dataspace.currentFacet().addEndpoint(() => {
return [BoxState(this.value), null];
});
Dataspace.currentFacet().addDataflow(() => {
if (this.value === N) {
Dataspace.currentFacet().stop(() => {
console.log('terminated box root facet');
});
}
});
Dataspace.currentFacet().addEndpoint(() => {
let handler = Skeleton.analyzeAssertion(SetBox(_$));
handler.callback = Dataspace.wrap((evt, vs) => {
if (evt === Skeleton.EVENT_MESSAGE) {
Dataspace.currentFacet().actor.scheduleScript(() => {
this.value = vs.get(0);
// console.log('box updated value', vs.get(0));
new Ground(() => {
Dataspace.spawn('box', function () {
Dataspace.declareField(this, 'value', 0);
Dataspace.currentFacet.addEndpoint(() => {
return { assertion: BoxState(this.value), analysis: null };
});
}
});
return [Syndicate.Observe(SetBox(_$)), handler];
});
});

Dataspace.spawn('client', () => {
Dataspace.currentFacet().addEndpoint(() => {
let handler = Skeleton.analyzeAssertion(BoxState(_$));
handler.callback = Dataspace.wrap((evt, vs) => {
if (evt === Skeleton.EVENT_ADDED) {
Dataspace.currentFacet().actor.scheduleScript(() => {
// console.log('client sending SetBox', vs.get(0) + 1);
Dataspace.send(SetBox(vs.get(0) + 1));
Dataspace.currentFacet.addDataflow(() => {
console.log('dataflow saw new value', this.value);
if (this.value === N) {
Dataspace.currentFacet.stop(() => {
console.log('terminated box root facet');
});
}
});
}
});
return [Syndicate.Observe(BoxState(_$)), handler];
});
Dataspace.currentFacet().addEndpoint(() => {
let handler = Skeleton.analyzeAssertion(BoxState(__));
handler.callback = Dataspace.wrap((evt, vs) => {
if (evt === Skeleton.EVENT_REMOVED) {
Dataspace.currentFacet().actor.scheduleScript(() => {
console.log('box gone');
Dataspace.currentFacet.addEndpoint(() => {
let analysis = Skeleton.analyzeAssertion(SetBox(_$));
analysis.callback = Dataspace.wrap((evt, vs) => {
if (evt === Skeleton.EventType.MESSAGE) {
Dataspace.currentFacet.actor.scheduleScript(() => {
this.value = vs[0];
console.log('box updated value', vs[0]);
});
}
});
return { assertion: Observe(SetBox(_$)), analysis };
});
}
});
return [Syndicate.Observe(BoxState(__)), handler];
});
});

module.exports[Dataspace.BootSteps] = {
module: module,
steps: Dataspace._currentFacet.actions
};
Dataspace._currentFacet = _savedGlobalFacet;
_savedGlobalFacet = null;

Ground.bootModule(module, (g) => {
g.addStopHandler(() => {
console.timeEnd('box-and-client-' + N.toString());
});
});
Dataspace.spawn('client', function () {
Dataspace.currentFacet.addEndpoint(() => {
let analysis = Skeleton.analyzeAssertion(BoxState(_$));
analysis.callback = Dataspace.wrap((evt, vs) => {
if (evt === Skeleton.EventType.ADDED) {
Dataspace.currentFacet.actor.scheduleScript(() => {
console.log('client sending SetBox', vs[0] + 1);
Dataspace.send(SetBox(vs[0] + 1));
});
}
});
return { assertion: Observe(BoxState(_$)), analysis };
});
Dataspace.currentFacet.addEndpoint(() => {
let analysis = Skeleton.analyzeAssertion(BoxState(__));
analysis.callback = Dataspace.wrap((evt, _vs) => {
if (evt === Skeleton.EventType.REMOVED) {
Dataspace.currentFacet.actor.scheduleScript(() => {
console.log('box gone');
});
}
});
return { assertion: Observe(BoxState(__)), analysis };
});
});
}).addStopHandler(() => console.timeEnd('box-and-client-' + N.toString())).start();
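
The interleaved hunk above is easier to follow once the API moves are spelled out: Dataspace.currentFacet is now a property rather than a function call, Skeleton event constants move from Skeleton.EVENT_* to Skeleton.EventType.*, captured values arrive as a plain array (vs[0]) rather than an Immutable list (vs.get(0)), endpoints return an object with assertion and analysis fields instead of an [assertion, handler] pair, and the module-level Ground.bootModule boot is replaced by new Ground(...).start(). A re-assembled fragment of the new-style client endpoint, using only names that appear in the example above (fragment only, not a standalone program):

// New-style endpoint from the ported example.
Dataspace.currentFacet.addEndpoint(() => {
    let analysis = Skeleton.analyzeAssertion(BoxState(_$));
    analysis.callback = Dataspace.wrap((evt, vs) => {
        if (evt === Skeleton.EventType.ADDED) {
            Dataspace.currentFacet.actor.scheduleScript(() => {
                Dataspace.send(SetBox(vs[0] + 1));   // vs is now a plain array
            });
        }
    });
    return { assertion: Observe(BoxState(_$)), analysis };
});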

6  packages/core/jest.config.ts (new file)

@@ -0,0 +1,6 @@
import 'preserves';

export default {
    preset: 'ts-jest',
    testEnvironment: 'node',
};
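
With this config in place, ts-jest compiles TypeScript specs on the fly and runs them under Node. A minimal sketch of such a spec; the file name, import path, and assertion are illustrative assumptions, not files added by this commit:

// Hypothetical packages/core/src/record.test.ts
import { Record } from './index';   // assumed location of the ported entry point

test('Record.makeConstructor builds a usable constructor', () => {
    const BoxState = Record.makeConstructor('BoxState', ['value']);
    expect(BoxState(0)).toBeDefined();
});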

packages/core/package.json

@@ -9,14 +9,17 @@
  },
  "repository": "github:syndicate-lang/syndicate-js",
  "scripts": {
    "test": "mocha",
    "cover": "nyc --reporter=html mocha"
    "prepare": "npm run compile && npm run rollup",
    "compile": "../../node_modules/.bin/tsc --incremental",
    "rollup": "../../node_modules/.bin/rollup -c syndicate.dist.js && node ./dist-link.js",
    "test": "../../node_modules/.bin/jest",
    "cover": "../../node_modules/.bin/nyc --reporter=html ../../node_modules/.bin/jest"
  },
  "main": "src/index.js",
  "type": "module",
  "main": "lib/index.js",
  "types": "lib/index.d.ts",
  "author": "Tony Garnock-Jones <tonyg@leastfixedpoint.com>",
  "dependencies": {
    "debug": "^4.1.1",
    "immutable": "^4.0.0-rc.12",
    "preserves": "0.3.1"
    "preserves": "0.4.0"
  }
}

packages/core/src/assertions.js (deleted)

@@ -1,50 +0,0 @@
"use strict";
//---------------------------------------------------------------------------
// @syndicate-lang/core, an implementation of Syndicate dataspaces for JS.
// Copyright (C) 2016-2018 Tony Garnock-Jones <tonyg@leastfixedpoint.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//---------------------------------------------------------------------------

if (require('preserves/src/singletonmodule.js')('syndicate-lang.org/syndicate-js',
                                                 require('../package.json').version,
                                                 'assertions.js',
                                                 module)) return;

var { Record } = require('preserves');

function Seal(contents) {
  if (!(this instanceof Seal)) return new Seal(contents);
  this.contents = contents;
}

Seal.prototype.toJSON = function () {
  // This definition is useless for actual transport, of course, but
  // useful for debugging, inasmuch as it seals off the contents from
  // the view of the JSON renderer, which has trouble with e.g. cyclic
  // data.
  return { '@seal': 0 };
};

module.exports.Discard = Record.makeConstructor('discard', []);
module.exports.Discard._instance = module.exports.Discard();

module.exports.Capture = Record.makeConstructor('capture', ['specification']);
module.exports.Observe = Record.makeConstructor('observe', ['specification']);

module.exports.Inbound = Record.makeConstructor('inbound', ['assertion']);
module.exports.Outbound = Record.makeConstructor('outbound', ['assertion']);
module.exports.Instance = Record.makeConstructor('instance', ['uniqueId']);

module.exports.Seal = Seal;
@ -1,129 +0,0 @@
|
|||
"use strict";
|
||||
//---------------------------------------------------------------------------
|
||||
// @syndicate-lang/core, an implementation of Syndicate dataspaces for JS.
|
||||
// Copyright (C) 2016-2018 Tony Garnock-Jones <tonyg@leastfixedpoint.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
//---------------------------------------------------------------------------
|
||||
|
||||
// Bags and Deltas (which are Bags where item-counts can be negative).
|
||||
|
||||
if (require('preserves/src/singletonmodule.js')('syndicate-lang.org/syndicate-js',
|
||||
require('../package.json').version,
|
||||
'bag.js',
|
||||
module)) return;
|
||||
|
||||
const Immutable = require("immutable");
|
||||
const { fromJS } = require("preserves");
|
||||
|
||||
const PRESENT_TO_ABSENT = -1;
|
||||
const ABSENT_TO_ABSENT = 0;
|
||||
const ABSENT_TO_PRESENT = 1;
|
||||
const PRESENT_TO_PRESENT = 2;
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function MutableBag(s) {
|
||||
this._items = s ? fromSet(s) : Immutable.Map();
|
||||
}
|
||||
|
||||
MutableBag.prototype.change = function (key, delta, clamp) {
|
||||
var net;
|
||||
({bag: this._items, net: net} = change(this._items, key, delta, clamp));
|
||||
return net;
|
||||
};
|
||||
|
||||
MutableBag.prototype.get = function (key) {
|
||||
return get(this._items, key);
|
||||
};
|
||||
|
||||
MutableBag.prototype.clear = function () {
|
||||
this._items = Immutable.Map();
|
||||
};
|
||||
|
||||
MutableBag.prototype.includes = function (key) {
|
||||
return includes(this._items, key);
|
||||
};
|
||||
|
||||
MutableBag.prototype.isEmpty = function () {
|
||||
return this._items.isEmpty();
|
||||
};
|
||||
|
||||
MutableBag.prototype.count = function () {
|
||||
return this._items.count();
|
||||
};
|
||||
|
||||
MutableBag.prototype.keys = function () {
|
||||
return this._items.keys();
|
||||
};
|
||||
|
||||
MutableBag.prototype.entries = function () {
|
||||
return this._items.entries();
|
||||
};
|
||||
|
||||
MutableBag.prototype.snapshot = function () {
|
||||
return this._items;
|
||||
};
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
const Bag = Immutable.Map;
|
||||
|
||||
function fromSet(s) {
|
||||
return Bag().withMutations(function (b) {
|
||||
for (let v of Immutable.Set(s)) {
|
||||
b = b.set(fromJS(v), 1);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function change(bag, key, delta, clamp) {
|
||||
let oldCount = get(bag, key);
|
||||
let newCount = oldCount + delta;
|
||||
if (clamp) {
|
||||
newCount = Math.max(0, newCount);
|
||||
}
|
||||
if (newCount === 0) {
|
||||
return {
|
||||
bag: bag.remove(key),
|
||||
net: (oldCount === 0) ? ABSENT_TO_ABSENT : PRESENT_TO_ABSENT
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
bag: bag.set(key, newCount),
|
||||
net: (oldCount === 0) ? ABSENT_TO_PRESENT : PRESENT_TO_PRESENT
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function get(bag, key) {
|
||||
return bag.get(key, 0);
|
||||
}
|
||||
|
||||
function includes(bag, key) {
|
||||
return get(bag, key) > 0;
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
module.exports.PRESENT_TO_ABSENT = PRESENT_TO_ABSENT;
|
||||
module.exports.ABSENT_TO_ABSENT = ABSENT_TO_ABSENT;
|
||||
module.exports.ABSENT_TO_PRESENT = ABSENT_TO_PRESENT;
|
||||
module.exports.PRESENT_TO_PRESENT = PRESENT_TO_PRESENT;
|
||||
module.exports.MutableBag = MutableBag;
|
||||
module.exports.Bag = Bag;
|
||||
module.exports.fromSet = fromSet;
|
||||
module.exports.change = change;
|
||||
module.exports.get = get;
|
||||
module.exports.includes = includes;
|
|
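
For reference while the TypeScript replacement is in flux: the deleted bag module's change() reports how a key's count crossed zero. A short usage sketch against the API shown above; the import path refers to the pre-commit CommonJS file, so treat the whole snippet as illustrative:

// Sketch only; MutableBag and the net-change constants come from the module above.
import { MutableBag, ABSENT_TO_PRESENT, PRESENT_TO_ABSENT } from './bag.js';

const b = new MutableBag();
console.log(b.change('x', +1) === ABSENT_TO_PRESENT);   // true: count went 0 -> 1
console.log(b.change('x', +1));                          // PRESENT_TO_PRESENT (1 -> 2)
console.log(b.change('x', -2) === PRESENT_TO_ABSENT);    // true: count went 2 -> 0
console.log(b.includes('x'));                            // false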
@ -1,3 +0,0 @@
|
|||
module.exports = {
|
||||
randomBytes: void 0,
|
||||
};
|
|
@ -1,130 +0,0 @@
|
|||
"use strict";
|
||||
//---------------------------------------------------------------------------
|
||||
// @syndicate-lang/core, an implementation of Syndicate dataspaces for JS.
|
||||
// Copyright (C) 2016-2018 Tony Garnock-Jones <tonyg@leastfixedpoint.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
//---------------------------------------------------------------------------
|
||||
|
||||
// Property-based "dataflow"
|
||||
|
||||
if (require('preserves/src/singletonmodule.js')('syndicate-lang.org/syndicate-js',
|
||||
require('../package.json').version,
|
||||
'dataflow.js',
|
||||
module)) return;
|
||||
|
||||
var Immutable = require("immutable");
|
||||
var MapSet = require("./mapset.js");
|
||||
|
||||
function Graph() {
|
||||
this.edgesForward = Immutable.Map();
|
||||
this.edgesReverse = Immutable.Map();
|
||||
this.damagedNodes = Immutable.Set();
|
||||
this.currentSubjectId = null;
|
||||
}
|
||||
|
||||
Graph.prototype.withSubject = function (subjectId, f) {
|
||||
var oldSubjectId = this.currentSubjectId;
|
||||
this.currentSubjectId = subjectId;
|
||||
var result;
|
||||
try {
|
||||
result = f();
|
||||
} catch (e) {
|
||||
this.currentSubjectId = oldSubjectId;
|
||||
throw e;
|
||||
}
|
||||
this.currentSubjectId = oldSubjectId;
|
||||
return result;
|
||||
};
|
||||
|
||||
Graph.prototype.recordObservation = function (objectId) {
|
||||
if (this.currentSubjectId) {
|
||||
this.edgesForward = MapSet.add(this.edgesForward, objectId, this.currentSubjectId);
|
||||
this.edgesReverse = MapSet.add(this.edgesReverse, this.currentSubjectId, objectId);
|
||||
}
|
||||
};
|
||||
|
||||
Graph.prototype.recordDamage = function (objectId) {
|
||||
this.damagedNodes = this.damagedNodes.add(objectId);
|
||||
};
|
||||
|
||||
Graph.prototype.forgetSubject = function (subjectId) {
|
||||
var self = this;
|
||||
var subjectObjects = self.edgesReverse.get(subjectId) || Immutable.Set();
|
||||
self.edgesReverse = self.edgesReverse.remove(subjectId);
|
||||
subjectObjects.forEach(function (objectId) {
|
||||
self.edgesForward = MapSet.remove(self.edgesForward, objectId, subjectId);
|
||||
});
|
||||
};
|
||||
|
||||
Graph.prototype.repairDamage = function (repairNode) {
|
||||
var self = this;
|
||||
var repairedThisRound = Immutable.Set();
|
||||
while (true) {
|
||||
var workSet = self.damagedNodes;
|
||||
self.damagedNodes = Immutable.Set();
|
||||
|
||||
var alreadyDamaged = workSet.intersect(repairedThisRound);
|
||||
if (!alreadyDamaged.isEmpty()) {
|
||||
console.warn('Cyclic dependencies involving', alreadyDamaged);
|
||||
}
|
||||
|
||||
workSet = workSet.subtract(repairedThisRound);
|
||||
repairedThisRound = repairedThisRound.union(workSet);
|
||||
|
||||
if (workSet.isEmpty()) break;
|
||||
|
||||
workSet.forEach(function (objectId) {
|
||||
var subjects = self.edgesForward.get(objectId) || Immutable.Set();
|
||||
subjects.forEach(function (subjectId) {
|
||||
self.forgetSubject(subjectId);
|
||||
self.withSubject(subjectId, function () {
|
||||
repairNode(subjectId);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
Graph.prototype.defineObservableProperty = function (obj, prop, value, maybeOptions) {
|
||||
var graph = this;
|
||||
var options = maybeOptions === void 0 ? {} : maybeOptions;
|
||||
var objectId = options.objectId || '__' + prop;
|
||||
Object.defineProperty(obj, prop, {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
graph.recordObservation(objectId);
|
||||
return value;
|
||||
},
|
||||
set: function (newValue) {
|
||||
if (!options.noopGuard || !options.noopGuard(value, newValue)) {
|
||||
graph.recordDamage(objectId);
|
||||
value = newValue;
|
||||
}
|
||||
}
|
||||
});
|
||||
graph.recordDamage(objectId);
|
||||
return objectId;
|
||||
};
|
||||
|
||||
Graph.newScope = function (o) {
|
||||
function O() {}
|
||||
O.prototype = o;
|
||||
return new O();
|
||||
};
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
module.exports.Graph = Graph;
|
|
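
The deleted dataflow module implements the damage/repair scheme that Dataspace.declareField builds on: reads performed inside withSubject record dependency edges, writes mark the property damaged, and repairDamage re-runs every subject that depended on a damaged property. A compact sketch against the API shown above; the import refers to the pre-commit module, so treat it as illustrative:

// Sketch only; Graph comes from the module above.
import { Graph } from './dataflow.js';

const g = new Graph();
const obj: any = {};
g.defineObservableProperty(obj, 'x', 1);

const show = () => console.log('x is', obj.x);   // reading obj.x records a dependency
g.withSubject('show', show);                     // first run: prints "x is 1"
obj.x = 2;                                       // setter marks the property damaged
g.repairDamage(() => show());                    // re-runs the damaged subject: prints "x is 2"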
@ -1,873 +0,0 @@
|
|||
"use strict";
|
||||
//---------------------------------------------------------------------------
|
||||
// @syndicate-lang/core, an implementation of Syndicate dataspaces for JS.
|
||||
// Copyright (C) 2016-2018 Tony Garnock-Jones <tonyg@leastfixedpoint.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
//---------------------------------------------------------------------------
|
||||
|
||||
if (require('preserves/src/singletonmodule.js')('syndicate-lang.org/syndicate-js',
|
||||
require('../package.json').version,
|
||||
'dataspace.js',
|
||||
module)) return;
|
||||
|
||||
const Immutable = require("immutable");
|
||||
const Preserves = require("preserves");
|
||||
// const debug = require("debug")("syndicate/core:dataspace");
|
||||
|
||||
const Skeleton = require('./skeleton.js');
|
||||
const $Special = require('./special.js');
|
||||
const Bag = require('./bag.js');
|
||||
const Assertions = require('./assertions.js');
|
||||
const Dataflow = require('./dataflow.js');
|
||||
|
||||
const PRIORITY = Object.freeze({
|
||||
QUERY_HIGH: 0,
|
||||
QUERY: 1,
|
||||
QUERY_HANDLER: 2,
|
||||
NORMAL: 3,
|
||||
GC: 4,
|
||||
IDLE: 5,
|
||||
_count: 6
|
||||
});
|
||||
|
||||
function Dataspace(bootProc) {
|
||||
this.nextId = 0;
|
||||
this.index = new Skeleton.Index();
|
||||
this.dataflow = new Dataflow.Graph();
|
||||
this.runnable = Immutable.List();
|
||||
this.pendingActions = Immutable.List([
|
||||
new ActionGroup(null, Immutable.List([new Spawn(null, bootProc, Immutable.Set())]))]);
|
||||
this.activatedModules = Immutable.Set();
|
||||
this.actors = Immutable.Map();
|
||||
}
|
||||
|
||||
// Parameters
|
||||
Dataspace._currentFacet = null;
|
||||
Dataspace._inScript = true;
|
||||
|
||||
Dataspace.BootSteps = Symbol.for('SyndicateBootSteps');
|
||||
|
||||
Dataspace.currentFacet = function () {
|
||||
return Dataspace._currentFacet;
|
||||
};
|
||||
|
||||
Dataspace.withNonScriptContext = function (thunk) {
|
||||
let savedInScript = Dataspace._inScript;
|
||||
Dataspace._inScript = false;
|
||||
try {
|
||||
return thunk();
|
||||
} finally {
|
||||
Dataspace._inScript = savedInScript;
|
||||
}
|
||||
};
|
||||
|
||||
Dataspace.withCurrentFacet = function (facet, thunk) {
|
||||
let savedFacet = Dataspace._currentFacet;
|
||||
Dataspace._currentFacet = facet;
|
||||
try {
|
||||
// console.group('Facet', facet && facet.toString());
|
||||
let result = thunk();
|
||||
Dataspace._currentFacet = savedFacet;
|
||||
return result;
|
||||
} catch (e) {
|
||||
let a = facet.actor;
|
||||
a.abandonQueuedWork();
|
||||
a._terminate(false);
|
||||
Dataspace._currentFacet = savedFacet;
|
||||
console.error('Actor ' + a.toString() + ' exited with exception:', e);
|
||||
} finally {
|
||||
// console.groupEnd();
|
||||
}
|
||||
};
|
||||
|
||||
Dataspace.wrap = function (f) {
|
||||
let savedFacet = Dataspace._currentFacet;
|
||||
return function () {
|
||||
let actuals = arguments;
|
||||
Dataspace.withCurrentFacet(savedFacet, function () {
|
||||
f.apply(savedFacet.fields, actuals);
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
Dataspace.wrapExternal = function (f) {
|
||||
let savedFacet = Dataspace._currentFacet;
|
||||
let ac = savedFacet.actor;
|
||||
return function () {
|
||||
if (savedFacet.isLive) {
|
||||
let actuals = arguments;
|
||||
ac.dataspace.start();
|
||||
ac.pushScript(function () {
|
||||
Dataspace.withCurrentFacet(savedFacet, function () {
|
||||
f.apply(this, actuals);
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
Dataspace.backgroundTask = function (k) {
|
||||
return Dataspace._currentFacet.actor.dataspace.ground().backgroundTask(k);
|
||||
};
|
||||
|
||||
Dataspace.referenceField = function (obj, prop) {
|
||||
if (!(prop in obj)) {
|
||||
Dataspace._currentFacet.actor.dataspace.dataflow.recordObservation(
|
||||
Immutable.List.of(obj, prop));
|
||||
}
|
||||
return obj[prop];
|
||||
};
|
||||
|
||||
Dataspace.declareField = function (obj, prop, init) {
|
||||
if (prop in obj) {
|
||||
obj[prop] = init;
|
||||
} else {
|
||||
Dataspace._currentFacet.actor.dataspace.dataflow.defineObservableProperty(
|
||||
obj,
|
||||
prop,
|
||||
init,
|
||||
{
|
||||
objectId: Immutable.List.of(obj, prop),
|
||||
noopGuard: Preserves.is
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
Dataspace.deleteField = function (obj, prop) {
|
||||
Dataspace._currentFacet.actor.dataspace.dataflow.recordDamage(Immutable.List.of(obj, prop));
|
||||
return delete obj[prop];
|
||||
};
|
||||
|
||||
Dataspace.prototype.runScripts = function () { // TODO: rename?
|
||||
this.runPendingScripts();
|
||||
this.performPendingActions();
|
||||
return !this.runnable.isEmpty() || !this.pendingActions.isEmpty();
|
||||
};
|
||||
|
||||
Dataspace.prototype.runPendingScripts = function () {
|
||||
let runnable = this.runnable;
|
||||
this.runnable = Immutable.List();
|
||||
runnable.forEach((ac) => { ac.runPendingScripts(); /* TODO: rename? */ });
|
||||
};
|
||||
|
||||
Dataspace.prototype.performPendingActions = function () {
|
||||
let groups = this.pendingActions;
|
||||
this.pendingActions = Immutable.List();
|
||||
groups.forEach((group) => {
|
||||
group.actions.forEach((action) => {
|
||||
// console.log('[DATASPACE]', group.actor && group.actor.toString(), action);
|
||||
action.perform(this, group.actor);
|
||||
this.runPendingScripts();
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
Dataspace.prototype.commitActions = function (ac, pending) {
|
||||
this.pendingActions = this.pendingActions.push(new ActionGroup(ac, pending));
|
||||
};
|
||||
|
||||
Dataspace.prototype.refreshAssertions = function () {
|
||||
Dataspace.withNonScriptContext(() => {
|
||||
this.dataflow.repairDamage((subjectId) => {
|
||||
let [facet, eid] = subjectId;
|
||||
if (facet.isLive) { // TODO: necessary test, or tautological?
|
||||
let ac = facet.actor;
|
||||
Dataspace.withCurrentFacet(facet, () => {
|
||||
facet.endpoints.get(eid).refresh(this, ac, facet);
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
Dataspace.prototype.addActor = function (name, bootProc, initialAssertions, parentActor) {
|
||||
let ac = new Actor(this, name, initialAssertions, parentActor && parentActor.id);
|
||||
// debug('Spawn', ac && ac.toString());
|
||||
this.applyPatch(ac, ac.adhocAssertions.snapshot());
|
||||
ac.addFacet(null, () => {
|
||||
// Root facet is a dummy "system" facet that exists to hold
|
||||
// one-or-more "user" "root" facets.
|
||||
ac.addFacet(Dataspace._currentFacet, bootProc);
|
||||
// ^ The "true root", user-visible facet.
|
||||
initialAssertions.forEach((a) => { ac.adhocRetract(a); });
|
||||
});
|
||||
};
|
||||
|
||||
Dataspace.prototype.applyPatch = function (ac, delta) {
|
||||
// if (!delta.isEmpty()) debug('applyPatch BEGIN', ac && ac.toString());
|
||||
let removals = [];
|
||||
delta.forEach((count, a) => {
|
||||
if (a !== void 0) {
|
||||
if (count > 0) {
|
||||
// debug('applyPatch +', a && a.toString());
|
||||
this.adjustIndex(a, count);
|
||||
} else {
|
||||
removals.push([count, a]);
|
||||
}
|
||||
if (ac) ac.cleanupChanges.change(a, -count);
|
||||
}
|
||||
});
|
||||
removals.forEach(([count, a]) => {
|
||||
// debug('applyPatch -', a && a.toString());
|
||||
this.adjustIndex(a, count);
|
||||
});
|
||||
// if (!delta.isEmpty()) debug('applyPatch END');
|
||||
};
|
||||
|
||||
Dataspace.prototype.sendMessage = function (m, sendingActor) {
|
||||
// debug('sendMessage', sendingActor && sendingActor.toString(), m.toString());
|
||||
this.index.sendMessage(m);
|
||||
// this.index.sendMessage(m, (leaf, _m) => {
|
||||
// sendingActor.touchedTopics = sendingActor.touchedTopics.add(leaf);
|
||||
// });
|
||||
};
|
||||
|
||||
Dataspace.prototype.adjustIndex = function (a, count) {
|
||||
return this.index.adjustAssertion(a, count);
|
||||
};
|
||||
|
||||
Dataspace.prototype.subscribe = function (handler) {
|
||||
this.index.addHandler(handler, handler.callback);
|
||||
};
|
||||
|
||||
Dataspace.prototype.unsubscribe = function (handler) {
|
||||
this.index.removeHandler(handler, handler.callback);
|
||||
};
|
||||
|
||||
Dataspace.prototype.endpointHook = function (facet, endpoint) {
|
||||
};
|
||||
|
||||
Dataspace.prototype._debugString = function (outerIndent) {
|
||||
const pieces = [];
|
||||
pieces.push(this.index.root._debugString(outerIndent));
|
||||
outerIndent = outerIndent || '\n';
|
||||
pieces.push(outerIndent + 'FACET TREE');
|
||||
this.actors.forEach((a) => {
|
||||
pieces.push(outerIndent + ' ' + a.toString());
|
||||
function walkFacet(indent, f) {
|
||||
pieces.push(indent + f.toString());
|
||||
f.endpoints.forEach((ep) => {
|
||||
pieces.push(indent + ' - ' + ep.id + ': ' + (ep.assertion && ep.assertion.toString()));
|
||||
});
|
||||
f.children.forEach((child) => { walkFacet(indent + ' ', child); });
|
||||
}
|
||||
a.rootFacet.children.forEach((child) => { walkFacet(outerIndent + ' ', child); });
|
||||
});
|
||||
pieces.push(outerIndent + 'ACTORS');
|
||||
this.actors.forEach((a) => pieces.push(outerIndent + ' ' + a.toString()));
|
||||
return pieces.join('');
|
||||
};
|
||||
|
||||
Dataspace.prototype._dotGraph = function () {
|
||||
let id = 0;
|
||||
const assertionIds = {};
|
||||
|
||||
const nodes = [];
|
||||
const edges = [];
|
||||
const pieces = [];
|
||||
|
||||
function emitNode(type, id, _label, attrs) {
|
||||
const label = _str(_label);
|
||||
pieces.push(`\n ${id} [label=${JSON.stringify(label)}];`);
|
||||
nodes.push(Object.assign({}, attrs || {}, {type, id, label}));
|
||||
}
|
||||
|
||||
function emitEdge(source, target, maybeDir) {
|
||||
pieces.push(`\n ${source} -- ${target} [dir=${maybeDir || 'none'}];`);
|
||||
edges.push({source, target, dir: maybeDir || 'none'});
|
||||
}
|
||||
|
||||
function _aId(aStr) {
|
||||
// if (aStr.startsWith('observe(Request(') || aStr.startsWith('Request(')) return null;
|
||||
// if (aStr.startsWith('observe(Connection(') || aStr.startsWith('Connection(')) return null;
|
||||
if (!(aStr in assertionIds)) assertionIds[aStr] = id++;
|
||||
return assertionIds[aStr];
|
||||
}
|
||||
|
||||
let topics = Immutable.Map();
|
||||
function topicForLeaf(leaf) {
|
||||
if (topics.has(leaf)) {
|
||||
return topics.get(leaf);
|
||||
} else {
|
||||
const topic = {id: id++, hasEmitter: false, senders: {}, inbound: {}, outbound: {}};
|
||||
topics = topics.set(leaf, topic);
|
||||
return topic;
|
||||
}
|
||||
}
|
||||
|
||||
function _str(a) {
|
||||
return '' + a;
|
||||
}
|
||||
|
||||
pieces.push('graph G {');
|
||||
pieces.push('\n overlap=false;');
|
||||
|
||||
this.actors.forEach((ac) => {
|
||||
const acId = ac.id;
|
||||
emitNode('actor', `ac_${acId}`, ac.toString());
|
||||
if (this.actors.has(ac.parentId)) {
|
||||
emitEdge(`ac_${ac.parentId}`, `ac_${acId}`, 'forward');
|
||||
}
|
||||
// ac.touchedTopics.forEach((leaf) => {
|
||||
// const topic = topicForLeaf(leaf);
|
||||
// topic.senders[acId] = true;
|
||||
// topic.hasEmitter = true;
|
||||
// });
|
||||
// ac.touchedTopics = Immutable.Set();
|
||||
function walkFacet(parent) {
|
||||
return (f) => {
|
||||
const facetId = id++;
|
||||
emitNode('facet', `facet_${facetId}`, `Facet ${f.id}`, {parent});
|
||||
emitEdge(parent, `facet_${facetId}`);
|
||||
f.endpoints.forEach((ep) => {
|
||||
if (ep.assertion !== void 0) {
|
||||
const aId = _aId(_str(ep.assertion));
|
||||
if (aId) {
|
||||
emitNode('endpoint', `ep_${ep.id}`, ep.id);
|
||||
emitEdge(`facet_${facetId}`, `ep_${ep.id}`);
|
||||
emitEdge(`ep_${ep.id}`, `assn_${aId}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
f.children.forEach(walkFacet(`facet_${facetId}`));
|
||||
};
|
||||
}
|
||||
ac.rootFacet.children.forEach(walkFacet(`ac_${acId}`));
|
||||
});
|
||||
|
||||
function walkNode(n) {
|
||||
n.edges.forEach((table) => table.forEach(walkNode));
|
||||
n.continuation.leafMap.forEach((cvMap) => cvMap.forEach((leaf) => {
|
||||
const topic = topicForLeaf(leaf);
|
||||
leaf.cachedAssertions.forEach((observed_assertion) => {
|
||||
const observed_assertion_id = _aId(_str(observed_assertion));
|
||||
if (observed_assertion_id) {
|
||||
topic.inbound[observed_assertion_id] = true;
|
||||
topic.hasEmitter = true;
|
||||
}
|
||||
});
|
||||
leaf.handlerMap.forEach((handler) => {
|
||||
handler.callbacks.forEach((cb) => {
|
||||
const observing_assertion_id = _aId(_str(cb.__endpoint.handler.assertion));
|
||||
if (observing_assertion_id) {
|
||||
topic.outbound[observing_assertion_id] = true;
|
||||
}
|
||||
});
|
||||
});
|
||||
}));
|
||||
}
|
||||
walkNode(this.index.root);
|
||||
|
||||
for (const a in assertionIds) {
|
||||
emitNode('assertion', `assn_${assertionIds[a]}`, a);
|
||||
}
|
||||
|
||||
topics.forEach((topic) => {
|
||||
if (topic.hasEmitter) {
|
||||
emitNode('topic', 'topic_' + topic.id, ''); // `Topic ${topic.id}`);
|
||||
for (const acId in topic.senders) {
|
||||
emitEdge(`ac_${acId}`, `topic_${topic.id}`, 'forward');
|
||||
}
|
||||
for (const aId in topic.inbound) {
|
||||
emitEdge(`assn_${aId}`, `topic_${topic.id}`, 'forward');
|
||||
}
|
||||
for (const aId in topic.outbound) {
|
||||
emitEdge(`topic_${topic.id}`, `assn_${aId}`, 'forward');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
pieces.push('\n}');
|
||||
|
||||
// require('fs').writeFileSync('d.json', 'var dataspaceContents = ' + JSON.stringify({nodes, edges}, null, 2));
|
||||
|
||||
return pieces.join('');
|
||||
};
|
||||
|
||||
function Actor(dataspace, name, initialAssertions, parentActorId) {
|
||||
this.id = dataspace.nextId++;
|
||||
this.dataspace = dataspace;
|
||||
this.name = name;
|
||||
this.rootFacet = null;
|
||||
this.isRunnable = false;
|
||||
this.pendingScripts = [];
|
||||
for (let i = 0; i < PRIORITY._count; i++) { this.pendingScripts.push(Immutable.List()); }
|
||||
this.pendingActions = Immutable.List();
|
||||
this.adhocAssertions = new Bag.MutableBag(initialAssertions); // no negative counts allowed
|
||||
this.cleanupChanges = new Bag.MutableBag(); // negative counts allowed!
|
||||
this.parentId = parentActorId;
|
||||
// this.touchedTopics = Immutable.Set();
|
||||
dataspace.actors = dataspace.actors.set(this.id, this);
|
||||
}
|
||||
|
||||
Actor.prototype.runPendingScripts = function () {
|
||||
while (true) {
|
||||
let script = this.popNextScript();
|
||||
if (!script) break;
|
||||
script();
|
||||
this.dataspace.refreshAssertions();
|
||||
}
|
||||
|
||||
this.isRunnable = false;
|
||||
let pending = this.pendingActions;
|
||||
if (!pending.isEmpty()) {
|
||||
this.pendingActions = Immutable.List();
|
||||
this.dataspace.commitActions(this, pending);
|
||||
}
|
||||
};
|
||||
|
||||
Actor.prototype.popNextScript = function () {
|
||||
let scripts = this.pendingScripts;
|
||||
for (let i = 0; i < PRIORITY._count; i++) {
|
||||
let q = scripts[i];
|
||||
if (!q.isEmpty()) {
|
||||
scripts[i] = q.shift();
|
||||
return q.first();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
Actor.prototype.abandonQueuedWork = function () {
|
||||
this.pendingActions = Immutable.List();
|
||||
for (let i = 0; i < PRIORITY._count; i++) { this.pendingScripts[i] = Immutable.List(); }
|
||||
};
|
||||
|
||||
Actor.prototype.scheduleScript = function (unwrappedThunk, priority) {
|
||||
this.pushScript(Dataspace.wrap(unwrappedThunk), priority);
|
||||
};
|
||||
|
||||
Actor.prototype.pushScript = function (wrappedThunk, priority) {
|
||||
// The wrappedThunk must already have code for ensuring
|
||||
// _currentFacet is correct inside it. Compare with scheduleScript.
|
||||
if (priority === void 0) {
|
||||
priority = PRIORITY.NORMAL;
|
||||
}
|
||||
if (!this.isRunnable) {
|
||||
this.isRunnable = true;
|
||||
this.dataspace.runnable = this.dataspace.runnable.push(this);
|
||||
}
|
||||
this.pendingScripts[priority] = this.pendingScripts[priority].push(wrappedThunk);
|
||||
};
|
||||
|
||||
Actor.prototype.addFacet = function (parentFacet, bootProc, checkInScript) {
|
||||
if (checkInScript === true && !Dataspace._inScript) {
|
||||
throw new Error("Cannot add facet outside script; are you missing a `react { ... }`?");
|
||||
}
|
||||