forked from syndicate-lang/preserves
Compare commits
1 Commits
main
...
js_typed_r
Author | SHA1 | Date |
---|---|---|
Tony Garnock-Jones | cff1a3d318 |
|
@ -1,4 +1,3 @@
|
|||
_site/
|
||||
preserves.pdf
|
||||
preserves-schema.pdf
|
||||
scratch/
|
||||
|
|
|
@ -10,5 +10,5 @@ pages:
|
|||
paths:
|
||||
- public
|
||||
only:
|
||||
- main
|
||||
- master
|
||||
|
||||
|
|
13
Makefile
13
Makefile
|
@ -1,15 +1,6 @@
|
|||
__ignored__ := $(shell ./setup.sh)
|
||||
|
||||
PDFS=preserves.pdf preserves-schema.pdf
|
||||
|
||||
all: $(PDFS)
|
||||
|
||||
clean:
|
||||
rm -f $(PDFS)
|
||||
|
||||
%.pdf: %.md preserves.css
|
||||
preserves.pdf: preserves.md preserves.css
|
||||
google-chrome --headless --disable-gpu --print-to-pdf=$@ \
|
||||
http://localhost:4000/preserves/$*.html
|
||||
http://localhost:4000/preserves/preserves.html
|
||||
|
||||
test-all:
|
||||
make -C tests
|
||||
|
|
2
NOTICE
2
NOTICE
|
@ -1,2 +1,2 @@
|
|||
Preserves: an Expressive Data Language
|
||||
Copyright 2018-2021 Tony Garnock-Jones
|
||||
Copyright 2018-2020 Tony Garnock-Jones
|
||||
|
|
56
README.md
56
README.md
|
@ -1,14 +1,15 @@
|
|||
---
|
||||
projectpages: "https://gitlab.com/preserves/preserves"
|
||||
projecttree: "https://gitlab.com/preserves/preserves/tree/main"
|
||||
projecttree: "https://gitlab.com/preserves/preserves/tree/master"
|
||||
title: "Preserves: an Expressive Data Language"
|
||||
no_site_title: true
|
||||
---
|
||||
|
||||
This [repository]({{page.projectpages}}) contains a
|
||||
[proposal](preserves.html) and various implementations of *Preserves*,
|
||||
a new data model and serialization format in many ways comparable to
|
||||
JSON, XML, S-expressions, CBOR, ASN.1 BER, and so on.
|
||||
[proposal](preserves.html) and
|
||||
[various implementations]({{page.projecttree}}/implementations/) of
|
||||
*Preserves*, a new data model and serialization format in many ways
|
||||
comparable to JSON, XML, S-expressions, CBOR, ASN.1 BER, and so on.
|
||||
|
||||
> **WARNING** Everything in this repository is experimental and in
|
||||
> flux! The design of Preserves is not finalised and may change
|
||||
|
@ -17,53 +18,10 @@ JSON, XML, S-expressions, CBOR, ASN.1 BER, and so on.
|
|||
|
||||
## Core documents
|
||||
|
||||
### Preserves data model and serialization formats
|
||||
|
||||
Preserves is defined in terms of a syntax-neutral
|
||||
[data model and semantics](preserves.html#starting-with-semantics)
|
||||
which all transfer syntaxes share. This allows trivial, completely
|
||||
automatic, perfect-fidelity conversion between syntaxes.
|
||||
|
||||
- [Preserves tutorial](TUTORIAL.html)
|
||||
- [Preserves specification](preserves.html), including semantics,
|
||||
data model, textual syntax, and compact binary syntax
|
||||
textual syntax, and compact binary syntax
|
||||
- [Canonical Form for Binary Syntax](canonical-binary.html)
|
||||
- [Syrup](https://github.com/ocapn/syrup#pseudo-specification), a
|
||||
hybrid binary/human-readable syntax for the Preserves data model
|
||||
|
||||
### Preserves schema and queries
|
||||
|
||||
- [Preserves Schema specification](preserves-schema.html)
|
||||
- [Preserves Path specification](preserves-path.html)
|
||||
|
||||
## Implementations
|
||||
|
||||
Implementations of the data model, plus the textual and/or binary transfer syntaxes:
|
||||
|
||||
- [Preserves for Nim](https://git.sr.ht/~ehmry/preserves-nim)
|
||||
- [Preserves for Python]({{page.projecttree}}/implementations/python/) ([`pip install preserves`](https://pypi.org/project/preserves/))
|
||||
- [Preserves for Racket]({{page.projecttree}}/implementations/racket/preserves/) ([`raco pkg install preserves`](https://pkgs.racket-lang.org/package/preserves))
|
||||
- [Preserves for Rust]({{page.projecttree}}/implementations/rust/) ([crates.io package](https://crates.io/crates/preserves))
|
||||
- [Preserves for Squeak Smalltalk](https://squeaksource.com/Preserves.html) (`Installer ss project: 'Preserves'; install: 'Preserves'`)
|
||||
- [Preserves for TypeScript and JavaScript]({{page.projecttree}}/implementations/javascript/) ([`yarn add @preserves/core`](https://www.npmjs.com/package/@preserves/core))
|
||||
|
||||
Implementations of the data model, plus Syrup transfer syntax:
|
||||
|
||||
- [Syrup for Racket](https://github.com/ocapn/syrup/blob/master/impls/racket/syrup/syrup.rkt)
|
||||
- [Syrup for Guile](https://github.com/ocapn/syrup/blob/master/impls/guile/syrup.scm)
|
||||
- [Syrup for Python](https://github.com/ocapn/syrup/blob/master/impls/python/syrup.py)
|
||||
- [Syrup for JavaScript](https://github.com/zarutian/agoric-sdk/blob/zarutian/captp_variant/packages/captp/lib/syrup.js)
|
||||
- [Syrup for Haskell](https://github.com/zenhack/haskell-preserves)
|
||||
|
||||
## Tools
|
||||
|
||||
### Preserves documents
|
||||
|
||||
- [preserves-tool](doc/preserves-tool.html), generic syntax translation and pretty-printing
|
||||
|
||||
### Preserves Schema documents and codegen
|
||||
|
||||
- [Tools for working with Preserves Schema](doc/schema-tools.html)
|
||||
|
||||
## Additional resources
|
||||
|
||||
|
@ -81,4 +39,4 @@ Tony Garnock-Jones <tonyg@leastfixedpoint.com>
|
|||
The contents of this repository are made available to you under the
|
||||
[Apache License, version 2.0](LICENSE)
|
||||
(<http://www.apache.org/licenses/LICENSE-2.0>), and are Copyright
|
||||
2018-2021 Tony Garnock-Jones.
|
||||
2018-2020 Tony Garnock-Jones.
|
||||
|
|
|
@ -38,9 +38,9 @@ representations of their keys.[^no-need-for-by-value]
|
|||
**Other kinds of `Value`.**
|
||||
There are no special canonicalization restrictions on
|
||||
`SignedInteger`s, `String`s, `ByteString`s, `Symbol`s, `Boolean`s,
|
||||
`Float`s, `Double`s, `Record`s, `Sequence`s, or `Embedded`s. The
|
||||
constraints given for these `Value`s in the [specification][spec]
|
||||
suffice to ensure canonicity.
|
||||
`Float`s, `Double`s, `Record`s, or `Sequence`s. The constraints given
|
||||
for these `Value`s in the [specification][spec] suffice to ensure
|
||||
canonicity.
|
||||
|
||||
<!-- Heading to visually offset the footnotes from the main document: -->
|
||||
## Notes
|
||||
|
|
|
@ -172,46 +172,8 @@ value can be represented as `<undefined>`.
|
|||
Dates, times, moments, and timestamps can be represented with a
|
||||
`Record` with label `rfc3339` having a single field, a `String`, which
|
||||
*MUST* conform to one of the `full-date`, `partial-time`, `full-time`,
|
||||
or `date-time` productions of [section 5.6 of RFC
|
||||
3339](https://tools.ietf.org/html/rfc3339#section-5.6). (In
|
||||
`date-time`, "T" and "Z" *MUST* be upper-case and "T" *MUST* be used;
|
||||
a space separating the `full-date` and `full-time` *MUST NOT* be
|
||||
used.)
|
||||
|
||||
## XML Infoset
|
||||
|
||||
[XML Infoset](https://www.w3.org/TR/2004/REC-xml-infoset-20040204/)
|
||||
describes the semantics of XML - that is, the underlying information
|
||||
contained in a document, independent of surface syntax.
|
||||
|
||||
A useful subset of XML Infoset, namely its Element Information Items
|
||||
(omitting processing instructions, entities, entity references,
|
||||
comments, namespaces, name prefixes, and base URIs), can be captured
|
||||
with the [schema](preserves-schema.html)
|
||||
|
||||
Node = Text / Element .
|
||||
Text = string .
|
||||
Element =
|
||||
/ @withAttributes
|
||||
<<rec> @localName symbol [@attributes Attributes @children Node ...]>
|
||||
/ @withoutAttributes
|
||||
<<rec> @localName symbol @children [Node ...]> .
|
||||
Attributes = { symbol: string ...:... } .
|
||||
|
||||
**Examples.**
|
||||
|
||||
<html
|
||||
<h1 {class: "title"} "Hello World!">
|
||||
<p
|
||||
"I could swear I've seen markup like this somewhere before. "
|
||||
"Perhaps it was "
|
||||
<a {href: "https://docs.racket-lang.org/search/index.html?q=xexpr%3F"} "here">
|
||||
"?"
|
||||
>
|
||||
<table
|
||||
<tr <th> <th "Column 1"> <th "Column 2">>
|
||||
<tr <th "Row 1"> <td 123> <td 234>>>
|
||||
>
|
||||
or `date-time` productions of
|
||||
[section 5.6 of RFC 3339](https://tools.ietf.org/html/rfc3339#section-5.6).
|
||||
|
||||
<!-- Heading to visually offset the footnotes from the main document: -->
|
||||
## Notes
|
||||
|
|
10
doc/demo.prs
10
doc/demo.prs
|
@ -1,10 +0,0 @@
|
|||
version 1 .
|
||||
JSON =
|
||||
/ @string string
|
||||
/ @integer int
|
||||
/ @double double
|
||||
/ @boolean JSONBoolean
|
||||
/ @null =null
|
||||
/ @array [JSON ...]
|
||||
/ @object { string: JSON ...:... } .
|
||||
JSONBoolean = =true / =false .
|
|
@ -1,46 +0,0 @@
|
|||
---
|
||||
title: preserves-schema-rkt
|
||||
---
|
||||
|
||||
The `preserves-schema-rkt` program reads
|
||||
[Preserves Schema](../preserves-schema.html) DSL input files. For each
|
||||
input file, it produces a Racket source file of the same name but
|
||||
with `.rkt` in place of `.prs`.
|
||||
|
||||
Instead of using this tool, you may prefer to use `#lang
|
||||
preserves-schema` to use Schema DSL syntax in an ordinary Racket
|
||||
module source file.
|
||||
|
||||
## Installation
|
||||
|
||||
Install Racket. Then, `raco pkg install preserves`.
|
||||
|
||||
## Usage
|
||||
|
||||
usage: preserves-schema-rkt [ <option> ... ] [<input-glob>] ...
|
||||
|
||||
<option> is one of
|
||||
|
||||
--output <directory>
|
||||
Output directory for modules (default: next to sources)
|
||||
--stdout
|
||||
Prints each module to stdout one after the other instead of writing them to files in the `--output` directory
|
||||
--no-write-files
|
||||
Disables generation of output to the filesystem
|
||||
--base <directory>
|
||||
Base directory for sources (default: common prefix)
|
||||
* --module <namespace=path>
|
||||
Additional Namespace=path import
|
||||
* --plugin-lib <lib-path>, -l <lib-path>
|
||||
Load compiler plugin library
|
||||
* --plugin-file <rkt-file-path>, -f <rkt-file-path>
|
||||
Load compiler plugin source file
|
||||
--help, -h
|
||||
Show this help
|
||||
--
|
||||
Do not treat any remaining argument as a switch (at this level)
|
||||
|
||||
* Asterisks indicate options allowed multiple times.
|
||||
|
||||
Multiple single-letter switches can be combined after
|
||||
one `-`. For example, `-h-` is the same as `-h --`.
|
|
@ -1,71 +0,0 @@
|
|||
---
|
||||
title: preserves-schema-rs
|
||||
---
|
||||
|
||||
The `preserves-schema-rs` program reads
|
||||
[Preserves Schema](../preserves-schema.html) AST input files (such as
|
||||
are produced by [`preserves-schemac`]({% link doc/preserves-schemac.md
|
||||
%})). It produces a collection of Rust source files providing parsers,
|
||||
unparsers, and Rust data structures reflecting the definitions in the
|
||||
inputs.
|
||||
|
||||
## Using the compiler from `build.rs` instead
|
||||
|
||||
You will usually not need to use the `preserves-schema-rs`
|
||||
command-line program. Instead, access the preserves-schema compiler
|
||||
API from your `build.rs`. The following example is taken from
|
||||
[`build.rs` for the `preserves-path` crate](https://gitlab.com/preserves/preserves/-/blob/18ac9168996026073ee16164fce108054b2a0ed7/implementations/rust/preserves-path/build.rs):
|
||||
|
||||
use preserves_schema::compiler::*;
|
||||
|
||||
use std::io::Error;
|
||||
use std::path::PathBuf;
|
||||
|
||||
fn main() -> Result<(), Error> {
|
||||
let buildroot = PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
|
||||
|
||||
let mut gen_dir = buildroot.clone();
|
||||
gen_dir.push("src/schemas");
|
||||
|
||||
let mut c = CompilerConfig::new(gen_dir, "crate::schemas".to_owned());
|
||||
|
||||
let inputs = expand_inputs(&vec!["path.bin".to_owned()])?;
|
||||
c.load_schemas_and_bundles(&inputs)?;
|
||||
|
||||
compile(&c)
|
||||
}
|
||||
|
||||
This approach also requires an `include!` from your main, hand-written
|
||||
source tree. The following is a snippet from
|
||||
[`preserves-path/src/lib.rs`](https://gitlab.com/preserves/preserves/-/blob/18ac9168996026073ee16164fce108054b2a0ed7/implementations/rust/preserves-path/src/lib.rs):
|
||||
|
||||
pub mod schemas {
|
||||
include!(concat!(env!("OUT_DIR"), "/src/schemas/mod.rs"));
|
||||
}
|
||||
|
||||
## Installation
|
||||
|
||||
The tool is
|
||||
[written in Rust](https://crates.io/crates/preserves-schema).
|
||||
[Install `cargo`.](https://doc.rust-lang.org/cargo/getting-started/installation.html)
|
||||
Then, `cargo install preserves-schema`.
|
||||
|
||||
## Usage
|
||||
|
||||
preserves-schema 1.0.0
|
||||
|
||||
USAGE:
|
||||
preserves-schema-rs [OPTIONS] --output-dir <output-dir> --prefix <prefix> [--] [input-glob]...
|
||||
|
||||
FLAGS:
|
||||
-h, --help Prints help information
|
||||
-V, --version Prints version information
|
||||
|
||||
OPTIONS:
|
||||
--module <module>...
|
||||
-o, --output-dir <output-dir>
|
||||
-p, --prefix <prefix>
|
||||
--support-crate <support-crate>
|
||||
|
||||
ARGS:
|
||||
<input-glob>...
|
|
@ -1,32 +0,0 @@
|
|||
---
|
||||
title: preserves-schema-ts
|
||||
---
|
||||
|
||||
The `preserves-schema-ts` program reads
|
||||
[Preserves Schema](../preserves-schema.html) DSL input files. For each
|
||||
input file, it produces a TypeScript source file of the same name but
|
||||
with `.ts` in place of `.prs`.
|
||||
|
||||
## Installation
|
||||
|
||||
Install node.js v12 or newer. Then, `yarn global add @preserves/schema`.
|
||||
|
||||
## Usage
|
||||
|
||||
Usage: preserves-schema-ts [options] [input...]
|
||||
|
||||
Compile Preserves schema definitions to TypeScript
|
||||
|
||||
Arguments:
|
||||
input Input filename or glob
|
||||
|
||||
Options:
|
||||
--output <directory> Output directory for modules (default: next to sources)
|
||||
--stdout Prints each module to stdout one after the other instead of writing them to files in the `--output`
|
||||
directory
|
||||
--base <directory> Base directory for sources (default: common prefix)
|
||||
--core <path> Import path for @preserves/core (default: "@preserves/core")
|
||||
--watch Watch base directory for changes
|
||||
--traceback Include stack traces in compiler errors
|
||||
--module <namespace=path> Additional Namespace=path import (default: [])
|
||||
-h, --help display help for command
|
|
@ -1,136 +0,0 @@
|
|||
---
|
||||
title: preserves-schemac
|
||||
---
|
||||
|
||||
The `preserves-schemac` program reads
|
||||
[Preserves Schema](../preserves-schema.html) DSL input files and
|
||||
outputs a binary-syntax Preserves document conforming to the
|
||||
[metaschema](https://gitlab.com/preserves/preserves/-/blob/main/schema/schema.prs).
|
||||
|
||||
It can either output single `Schema` records (corresponding to a
|
||||
single input file), or a `Bundle` of `Schema`s (corresponding to a
|
||||
directory tree of files).
|
||||
|
||||
## Installation
|
||||
|
||||
Install node.js v12 or newer. Then, `yarn global add @preserves/schema`.
|
||||
|
||||
## Usage
|
||||
|
||||
Usage: preserves-schemac [options] [input...]
|
||||
|
||||
Compile textual Preserves schema definitions to binary format
|
||||
|
||||
Arguments:
|
||||
input Input filename or glob
|
||||
|
||||
Options:
|
||||
--no-bundle Emit a single Schema instead of a schema Bundle
|
||||
--base <directory> Base directory for sources (default: common prefix)
|
||||
-h, --help display help for command
|
||||
|
||||
## Examples
|
||||
|
||||
### Single file (non-bundle)
|
||||
|
||||
Given a file [`demo.prs`](demo.prs) containing:
|
||||
|
||||
version 1 .
|
||||
JSON =
|
||||
/ @string string
|
||||
/ @integer int
|
||||
/ @double double
|
||||
/ @boolean JSONBoolean
|
||||
/ @null =null
|
||||
/ @array [JSON ...]
|
||||
/ @object { string: JSON ...:... } .
|
||||
JSONBoolean = =true / =false .
|
||||
|
||||
running the following:
|
||||
|
||||
preserves-schemac --no-bundle demo.prs
|
||||
|
||||
will produce the following binary file on `stdout`:
|
||||
|
||||
00000000: b4b3 0673 6368 656d 61b7 b307 7665 7273 ...schema...vers
|
||||
00000010: 696f 6e91 b30b 6465 6669 6e69 7469 6f6e ion...definition
|
||||
00000020: 73b7 b304 4a53 4f4e b4b3 026f 72b5 b5b1 s...JSON...or...
|
||||
00000030: 0673 7472 696e 67b4 b304 6174 6f6d b306 .string...atom..
|
||||
00000040: 5374 7269 6e67 8484 b5b1 0769 6e74 6567 String.....integ
|
||||
00000050: 6572 b4b3 0461 746f 6db3 0d53 6967 6e65 er...atom..Signe
|
||||
00000060: 6449 6e74 6567 6572 8484 b5b1 0664 6f75 dInteger.....dou
|
||||
00000070: 626c 65b4 b304 6174 6f6d b306 446f 7562 ble...atom..Doub
|
||||
00000080: 6c65 8484 b5b1 0762 6f6f 6c65 616e b4b3 le.....boolean..
|
||||
00000090: 0372 6566 b584 b30b 4a53 4f4e 426f 6f6c .ref....JSONBool
|
||||
000000a0: 6561 6e84 84b5 b104 6e75 6c6c b4b3 036c ean.....null...l
|
||||
000000b0: 6974 b304 6e75 6c6c 8484 b5b1 0561 7272 it..null.....arr
|
||||
000000c0: 6179 b4b3 0573 6571 6f66 b4b3 0372 6566 ay...seqof...ref
|
||||
000000d0: b584 b304 4a53 4f4e 8484 84b5 b106 6f62 ....JSON......ob
|
||||
000000e0: 6a65 6374 b4b3 0664 6963 746f 66b4 b304 ject...dictof...
|
||||
000000f0: 6174 6f6d b306 5374 7269 6e67 84b4 b303 atom..String....
|
||||
00000100: 7265 66b5 84b3 044a 534f 4e84 8484 8484 ref....JSON.....
|
||||
00000110: b30b 4a53 4f4e 426f 6f6c 6561 6eb4 b302 ..JSONBoolean...
|
||||
00000120: 6f72 b5b5 b104 7472 7565 b4b3 036c 6974 or....true...lit
|
||||
00000130: b304 7472 7565 8484 b5b1 0566 616c 7365 ..true.....false
|
||||
00000140: b4b3 036c 6974 b305 6661 6c73 6584 8484 ...lit..false...
|
||||
00000150: 8484 b30c 656d 6265 6464 6564 5479 7065 ....embeddedType
|
||||
00000160: 8084 84 ...
|
||||
|
||||
Piping the output to [`preserves-tool`](./preserves-tool.html) to
|
||||
pretty-print it produces:
|
||||
|
||||
<schema {
|
||||
version: 1,
|
||||
embeddedType: #f,
|
||||
definitions: {
|
||||
JSONBoolean: <or [
|
||||
[
|
||||
"true",
|
||||
<lit true>
|
||||
],
|
||||
[
|
||||
"false",
|
||||
<lit false>
|
||||
]
|
||||
]>,
|
||||
JSON: <or [
|
||||
[
|
||||
"string",
|
||||
<atom String>
|
||||
],
|
||||
[
|
||||
"integer",
|
||||
<atom SignedInteger>
|
||||
],
|
||||
[
|
||||
"double",
|
||||
<atom Double>
|
||||
],
|
||||
[
|
||||
"boolean",
|
||||
<ref [] JSONBoolean>
|
||||
],
|
||||
[
|
||||
"null",
|
||||
<lit null>
|
||||
],
|
||||
[
|
||||
"array",
|
||||
<seqof <ref [] JSON>>
|
||||
],
|
||||
[
|
||||
"object",
|
||||
<dictof <atom String> <ref [] JSON>>
|
||||
]
|
||||
]>
|
||||
}
|
||||
}>
|
||||
|
||||
### Multiple file (bundle)
|
||||
|
||||
Given a directory tree containing multiple `*.prs` files, running
|
||||
|
||||
preserves-schemac '**.prs'
|
||||
|
||||
will produce a binary `Bundle` on `stdout` containing one `Schema` for
|
||||
each input file in the tree.
|
|
@ -1,189 +0,0 @@
|
|||
---
|
||||
title: preserves-tool
|
||||
---
|
||||
|
||||
The `preserves-tool` program is a swiss army knife for working with
|
||||
Preserves documents.
|
||||
|
||||
preserves-tools 1.0.0
|
||||
|
||||
USAGE:
|
||||
preserves-tool <SUBCOMMAND>
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
|
||||
SUBCOMMANDS:
|
||||
completions
|
||||
convert
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
quote
|
||||
|
||||
## Installation
|
||||
|
||||
The tool is
|
||||
[written in Rust](https://crates.io/crates/preserves-tools).
|
||||
[Install `cargo`.](https://doc.rust-lang.org/cargo/getting-started/installation.html)
|
||||
Then, `cargo install preserves-tools`.
|
||||
|
||||
## Subcommands
|
||||
|
||||
The tool includes three subcommands.
|
||||
|
||||
### `preserves-tool convert`
|
||||
|
||||
This is the main tool. It can
|
||||
|
||||
- translate between the various Preserves text and binary document
|
||||
syntaxes;
|
||||
- strip annotations;
|
||||
- pretty-print; and
|
||||
- break down and filter documents using [preserves path]({{
|
||||
site.baseurl }}{% link preserves-path.md %}) selectors.
|
||||
|
||||
#### Usage
|
||||
|
||||
preserves-tool-convert
|
||||
|
||||
USAGE:
|
||||
preserves-tool convert [FLAGS] [OPTIONS]
|
||||
|
||||
FLAGS:
|
||||
--collect
|
||||
--escape-spaces
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
-i, --input-format <INPUT_FORMAT>
|
||||
[default: auto-detect] [possible values: auto-detect, text, binary]
|
||||
|
||||
--indent <on/off>
|
||||
[default: on] [possible values: disabled, no, n, off, 0, false,
|
||||
enabled, yes, y, on, 1, true]
|
||||
|
||||
--limit <LIMIT>
|
||||
|
||||
|
||||
-o, --output-format <OUTPUT_FORMAT>
|
||||
[default: text] [possible values: text, binary, unquoted]
|
||||
|
||||
--read-annotations <on/off>
|
||||
[default: on] [possible values: disabled, no, n, off, 0, false,
|
||||
enabled, yes, y, on, 1, true]
|
||||
|
||||
--select <SELECT>
|
||||
[default: *]
|
||||
|
||||
--select-output <SELECT_OUTPUT>
|
||||
[default: sequence] [possible values: sequence, set]
|
||||
|
||||
--write-annotations <on/off>
|
||||
[default: on] [possible values: disabled, no, n, off, 0, false,
|
||||
enabled, yes, y, on, 1, true]
|
||||
|
||||
### `preserves-tool quote`
|
||||
|
||||
This subcommand reads chunks from standard input and outputs each one
|
||||
as a Preserves `String`, `Symbol`, or `ByteString` using either the
|
||||
text or binary Preserves surface syntax.
|
||||
|
||||
This is useful when writing shell scripts that interact with other
|
||||
programs using Preserves as an interchange format.
|
||||
|
||||
It defaults to taking the entirety of standard input as a single large
|
||||
chunk, but it can also work with newline- or `nul`-delimited chunks.
|
||||
|
||||
#### Usage
|
||||
|
||||
```
|
||||
preserves-tool-quote
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote [OPTIONS] <SUBCOMMAND>
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
-o, --output-format <OUTPUT_FORMAT> [default: text] [possible values: text, binary, unquoted]
|
||||
|
||||
SUBCOMMANDS:
|
||||
byte-string
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
string
|
||||
symbol
|
||||
```
|
||||
|
||||
```
|
||||
preserves-tool-quote-string
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote string [FLAGS] [OPTIONS]
|
||||
|
||||
FLAGS:
|
||||
--escape-spaces
|
||||
-h, --help Print help information
|
||||
--include-terminator
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
--input-terminator <INPUT_TERMINATOR> [default: eof] [possible values: eof, newline, nul]
|
||||
```
|
||||
|
||||
```
|
||||
preserves-tool-quote-symbol
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote symbol [FLAGS] [OPTIONS]
|
||||
|
||||
FLAGS:
|
||||
--escape-spaces
|
||||
-h, --help Print help information
|
||||
--include-terminator
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
--input-terminator <INPUT_TERMINATOR> [default: eof] [possible values: eof, newline, nul]
|
||||
```
|
||||
|
||||
```
|
||||
preserves-tool-quote-byte-string
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote byte-string
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
```
|
||||
|
||||
### `preserves-tool completions`
|
||||
|
||||
This subcommand outputs Bash completion code to stdout, for sourcing
|
||||
at shell startup time.
|
||||
|
||||
#### Usage
|
||||
|
||||
Add the following to your `.profile` or similar:
|
||||
|
||||
eval "$(preserves-tool completions bash 2>/dev/null)"
|
||||
|
||||
Multiple shell dialects are supported (courtesy of
|
||||
[`clap`](https://crates.io/crates/clap)):
|
||||
|
||||
```
|
||||
preserves-tool-completions
|
||||
|
||||
USAGE:
|
||||
preserves-tool completions <dialect>
|
||||
|
||||
ARGS:
|
||||
<dialect> [possible values: bash, zsh, power-shell, fish, elvish]
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
```
|
|
@ -1,11 +0,0 @@
|
|||
---
|
||||
title: Tools for working with Preserves Schema
|
||||
---
|
||||
|
||||
A number of tools for working with [Preserves Schema]({{ site.baseurl
|
||||
}}{% link preserves-schema.md %}) exist:
|
||||
|
||||
- [preserves-schemac](preserves-schemac.html), generic Schema reader and linter
|
||||
- [preserves-schema-rkt](preserves-schema-rkt.html), Racket code generator
|
||||
- [preserves-schema-rs](preserves-schema-rs.html), Rust code generator
|
||||
- [preserves-schema-ts](preserves-schema-ts.html), TypeScript code generator
|
|
@ -1,22 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
exec 1>&2
|
||||
|
||||
# Ensure that various copies of schema.prs are in fact identical.
|
||||
cmp schema/schema.prs implementations/racket/preserves/preserves-schema/schema.prs
|
||||
|
||||
# Likewise for samples.pr
|
||||
cmp tests/samples.pr implementations/racket/preserves/preserves/tests/samples.pr
|
||||
|
||||
# https://gitlab.com/preserves/preserves/-/issues/30
|
||||
#
|
||||
# So it turns out that Racket's git-checkout mechanism pays attention
|
||||
# to portions of the tree outside the package of interest, which is
|
||||
# totally fair enough!
|
||||
#
|
||||
# But it means we can't use updir-containing symlinks anywhere in the
|
||||
# repository if we want to have a Racket-installable package as well.
|
||||
#
|
||||
# So I've cloned path.bin, too.
|
||||
cmp path/path.bin implementations/rust/preserves-path/path.bin
|
|
@ -1,21 +0,0 @@
|
|||
# Preserves Implementations
|
||||
|
||||
Here you may find:
|
||||
|
||||
- [dhall](dhall/), functions for converting Dhall values to a corresponding
|
||||
subset of Preserves.
|
||||
|
||||
- [javascript](javascript/), an implementation in TypeScript,
|
||||
compiling to JavaScript, for node.js and the Browser.
|
||||
|
||||
- [python](python/), an implementation for Python 2.x and 3.x.
|
||||
|
||||
- [racket](racket/), an implementation for Racket 7.x and newer
|
||||
(though older Rackets may also work with it).
|
||||
|
||||
- [rust](rust/), an implementation for Rust that interoperates with
|
||||
serde.
|
||||
|
||||
Other implementations are also available:
|
||||
|
||||
- [Preserves for Squeak Smalltalk](https://squeaksource.com/Preserves.html)
|
|
@ -1,3 +0,0 @@
|
|||
env:DHALL_PRELUDE
|
||||
? https://prelude.dhall-lang.org/v20.2.0/package.dhall
|
||||
sha256:a6036bc38d883450598d1de7c98ead113196fe2db02e9733855668b18096f07b
|
|
@ -1,48 +0,0 @@
|
|||
# Dhall
|
||||
|
||||
Not a true implementation of Preserves, but functions for translating Dhall
|
||||
values to Preserves and rendering them.
|
||||
|
||||
For example, to generate configuration for a Syndicate server listener:
|
||||
```dhall
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let Preserves = ./package.dhall
|
||||
|
||||
let Tcp/Type = { address : Text, port : Natural }
|
||||
|
||||
let RelayListener/Type = { transport : Tcp/Type }
|
||||
|
||||
let RequireService/Type = { relayListener : RelayListener/Type }
|
||||
|
||||
let Tcp/toPreserves =
|
||||
λ(tcp : Tcp/Type) →
|
||||
Preserves.record
|
||||
(Preserves.symbol "tcp")
|
||||
[ Preserves.string tcp.address
|
||||
, Preserves.integer (Prelude.Natural.toInteger tcp.port)
|
||||
]
|
||||
|
||||
let RelayListener/toPreserves =
|
||||
λ(relayListener : RelayListener/Type) →
|
||||
Preserves.record
|
||||
(Preserves.symbol "relay-listener")
|
||||
[ Tcp.toPreserves relayListener.transport ]
|
||||
|
||||
let RequireService/toPreserves =
|
||||
λ(requireService : RequireService/Type) →
|
||||
Preserves.record
|
||||
(Preserves.symbol "require-service")
|
||||
[ RelayListener.toPreserves requireService.relayListener ]
|
||||
|
||||
let example = { relayListener.transport = { address = "127.0.0.1", port = 1 } }
|
||||
|
||||
let rendering = Preserves.render (RequireService.toPreserves example)
|
||||
|
||||
let check =
|
||||
assert
|
||||
: rendering ≡ "<require-service <relay-listener <tcp \"127.0.0.1\" 1>>>"
|
||||
|
||||
in rendering
|
||||
|
||||
```
|
|
@ -1,10 +0,0 @@
|
|||
{-|
|
||||
Dhall encoding of an arbitrary Preserves value
|
||||
-}
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let Preserves/Type
|
||||
: Type
|
||||
= ∀(Preserves : Type) → ∀(value : Preserves/function Preserves) → Preserves
|
||||
|
||||
in Preserves/Type
|
|
@ -1,15 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves boolean map from a `Bool` value
|
||||
-}
|
||||
let Preserves/Type = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let bool
|
||||
: Bool → Preserves/Type
|
||||
= λ(x : Bool) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.boolean x
|
||||
|
||||
in bool
|
|
@ -1,37 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves dictionary value from a Dhall `Map` of `Preserves` values
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Map/Entry = Prelude.Map.Entry
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let Preserves/Entry = Map/Entry Preserves Preserves
|
||||
|
||||
let Preserves/Map = List Preserves/Entry
|
||||
|
||||
let map
|
||||
: Preserves/Map → Preserves
|
||||
= λ(x : Preserves/Map) →
|
||||
λ(Preserves : Type) →
|
||||
let Preserves/Entry = Map/Entry Preserves Preserves
|
||||
|
||||
in λ(value : Preserves/function Preserves) →
|
||||
value.dictionary
|
||||
( List/map
|
||||
Preserves/Entry@1
|
||||
Preserves/Entry
|
||||
( λ(e : Preserves/Entry@1) →
|
||||
{ mapKey = e.mapKey Preserves value
|
||||
, mapValue = e.mapValue Preserves value
|
||||
}
|
||||
)
|
||||
x
|
||||
)
|
||||
|
||||
in map
|
|
@ -1,40 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves dictionary value from a Dhall `Map`
|
||||
|
||||
See ./render.dhall for an example.
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/dictionary = ./dictionary.dhall
|
||||
|
||||
let dictionaryOf
|
||||
: ∀(a : Type) →
|
||||
(a → Preserves) →
|
||||
∀(b : Type) →
|
||||
(b → Preserves) →
|
||||
Prelude.Map.Type a b →
|
||||
Preserves
|
||||
= λ(a : Type) →
|
||||
λ(key : a → Preserves) →
|
||||
λ(b : Type) →
|
||||
λ(value : b → Preserves) →
|
||||
λ(x : Prelude.Map.Type a b) →
|
||||
let ab = Prelude.Map.Entry a b
|
||||
|
||||
let pp = Prelude.Map.Entry Preserves Preserves
|
||||
|
||||
in Preserves/dictionary
|
||||
( List/map
|
||||
ab
|
||||
pp
|
||||
( λ(x : ab) →
|
||||
{ mapKey = key x.mapKey, mapValue = value x.mapValue }
|
||||
)
|
||||
x
|
||||
)
|
||||
|
||||
in dictionaryOf
|
|
@ -1,15 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves floating-point value from a `Double` value
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let double
|
||||
: Double → Preserves
|
||||
= λ(x : Double) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.double x
|
||||
|
||||
in double
|
|
@ -1,15 +0,0 @@
|
|||
{-|
|
||||
Create an embedded Preserves value.
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let embedded
|
||||
: Preserves → Preserves
|
||||
= λ(value : Preserves) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.embedded (value@1 Preserves value)
|
||||
|
||||
in embedded
|
|
@ -1,40 +0,0 @@
|
|||
{-|
|
||||
Translate a `JSON` value to a `Preserves` value
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let JSON = Prelude.JSON.Type
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let fromJSON
|
||||
: JSON → Preserves
|
||||
= λ(json : JSON) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
json
|
||||
Preserves
|
||||
{ array = value.sequence
|
||||
, bool = λ(x : Bool) → value.symbol (if x then "true" else "false")
|
||||
, double = value.double
|
||||
, integer = value.integer
|
||||
, null = value.symbol "null"
|
||||
, object =
|
||||
let Entry = { mapKey : Text, mapValue : Preserves }
|
||||
|
||||
in λ(m : List Entry) →
|
||||
value.dictionary
|
||||
( List/map
|
||||
Entry
|
||||
{ mapKey : Preserves, mapValue : Preserves }
|
||||
(λ(e : Entry) → e with mapKey = value.string e.mapKey)
|
||||
m
|
||||
)
|
||||
, string = value.string
|
||||
}
|
||||
|
||||
in fromJSON
|
|
@ -1,12 +0,0 @@
|
|||
λ(Preserves : Type) →
|
||||
{ boolean : Bool → Preserves
|
||||
, double : Double → Preserves
|
||||
, integer : Integer → Preserves
|
||||
, string : Text → Preserves
|
||||
, symbol : Text → Preserves
|
||||
, record : Preserves → List Preserves → Preserves
|
||||
, sequence : List Preserves → Preserves
|
||||
, set : List Preserves → Preserves
|
||||
, dictionary : List { mapKey : Preserves, mapValue : Preserves } → Preserves
|
||||
, embedded : Preserves → Preserves
|
||||
}
|
|
@ -1,15 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves integer value from an `Integer` value
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let integer
|
||||
: Integer → Preserves
|
||||
= λ(x : Integer) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.integer x
|
||||
|
||||
in integer
|
|
@ -1,16 +0,0 @@
|
|||
{ Type = ./Type.dhall
|
||||
, function = ./function.dhall
|
||||
, boolean = ./boolean.dhall
|
||||
, dictionary = ./dictionary.dhall
|
||||
, dictionaryOf = ./dictionaryOf.dhall
|
||||
, double = ./double.dhall
|
||||
, embedded = ./embedded.dhall
|
||||
, fromJSON = ./fromJSON.dhall
|
||||
, integer = ./integer.dhall
|
||||
, record = ./record.dhall
|
||||
, render = ./render.dhall
|
||||
, sequence = ./sequence.dhall
|
||||
, sequenceOf = ./sequenceOf.dhall
|
||||
, string = ./string.dhall
|
||||
, symbol = ./symbol.dhall
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let record =
|
||||
λ(label : Preserves) →
|
||||
λ(fields : List Preserves) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.record
|
||||
(label Preserves value)
|
||||
( List/map
|
||||
Preserves@1
|
||||
Preserves
|
||||
(λ(value : Preserves@1) → value Preserves value@1)
|
||||
fields
|
||||
)
|
||||
|
||||
in record
|
|
@ -1,100 +0,0 @@
|
|||
{-
|
||||
Render a `Preserves` value to a diagnostic `Text` value
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let Map/Type = Prelude.Map.Type
|
||||
|
||||
let Text/concatSep = Prelude.Text.concatSep
|
||||
|
||||
let Text/concatMapSep = Prelude.Text.concatMapSep
|
||||
|
||||
let render
|
||||
: Preserves → Text
|
||||
= λ(value : Preserves) →
|
||||
value
|
||||
Text
|
||||
{ boolean = λ(x : Bool) → if x then "#t" else "#f"
|
||||
, double = Double/show
|
||||
, integer = Prelude.JSON.renderInteger
|
||||
, string = Text/show
|
||||
, symbol = λ(sym : Text) → "${sym}"
|
||||
, record =
|
||||
λ(label : Text) →
|
||||
λ(fields : List Text) →
|
||||
"<${label}"
|
||||
++ (if Prelude.List.null Text fields then "" else " ")
|
||||
++ Text/concatSep " " fields
|
||||
++ ">"
|
||||
, sequence = λ(xs : List Text) → "[ " ++ Text/concatSep " " xs ++ " ]"
|
||||
, set = λ(xs : List Text) → "#{" ++ Text/concatSep " " xs ++ " }"
|
||||
, dictionary =
|
||||
λ(m : Map/Type Text Text) →
|
||||
"{ "
|
||||
++ Text/concatMapSep
|
||||
" "
|
||||
{ mapKey : Text, mapValue : Text }
|
||||
( λ(e : { mapKey : Text, mapValue : Text }) →
|
||||
"${e.mapKey}: ${e.mapValue}"
|
||||
)
|
||||
m
|
||||
++ " }"
|
||||
, embedded = λ(value : Text) → "#!${value}"
|
||||
}
|
||||
|
||||
let Preserves/boolean = ./boolean.dhall
|
||||
|
||||
let Preserves/integer = ./integer.dhall
|
||||
|
||||
let Preserves/double = ./double.dhall
|
||||
|
||||
let Preserves/symbol = ./symbol.dhall
|
||||
|
||||
let Preserves/record = ./record.dhall
|
||||
|
||||
let Preserves/sequenceOf = ./sequenceOf.dhall
|
||||
|
||||
let Preserves/dictionaryOf = ./dictionaryOf.dhall
|
||||
|
||||
let Preserves/dictionaryOfSymbols = Preserves/dictionaryOf Text Preserves/symbol
|
||||
|
||||
let Preserves/embedded = ./embedded.dhall
|
||||
|
||||
let example0 =
|
||||
assert
|
||||
: ''
|
||||
${render
|
||||
( Preserves/dictionaryOfSymbols
|
||||
Preserves
|
||||
(λ(x : Preserves) → x)
|
||||
( toMap
|
||||
{ a = Preserves/integer +1
|
||||
, b =
|
||||
Preserves/sequenceOf
|
||||
Integer
|
||||
Preserves/integer
|
||||
[ +2, +3 ]
|
||||
, c =
|
||||
Preserves/dictionaryOfSymbols
|
||||
Double
|
||||
Preserves/double
|
||||
(toMap { d = 1.0, e = -1.0 })
|
||||
, d = Preserves/embedded (Preserves/boolean True)
|
||||
, e =
|
||||
Preserves/record
|
||||
(Preserves/symbol "capture")
|
||||
[ Preserves/record
|
||||
(Preserves/symbol "_")
|
||||
([] : List Preserves)
|
||||
]
|
||||
}
|
||||
)
|
||||
)}
|
||||
''
|
||||
≡ ''
|
||||
{ a: 1 b: [ 2 3 ] c: { d: 1.0 e: -1.0 } d: #!#t e: <capture <_>> }
|
||||
''
|
||||
|
||||
in render
|
|
@ -1,27 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves sequence value from a `List` of `Preserve` values
|
||||
|
||||
See ./sequenceOf.dhall for an example.
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let sequence
|
||||
: List Preserves → Preserves
|
||||
= λ(x : List Preserves) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.sequence
|
||||
( List/map
|
||||
Preserves@1
|
||||
Preserves
|
||||
(λ(value : Preserves@1) → value Preserves value@1)
|
||||
x
|
||||
)
|
||||
|
||||
in sequence
|
|
@ -1,21 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves sequence value from a `List` of values and a conversion function
|
||||
|
||||
See ./render.dhall for an example.
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/sequence = ./sequence.dhall
|
||||
|
||||
let sequenceOf
|
||||
: ∀(a : Type) → (a → Preserves) → List a → Preserves
|
||||
= λ(a : Type) →
|
||||
λ(f : a → Preserves) →
|
||||
λ(xs : List a) →
|
||||
Preserves/sequence (List/map a Preserves f xs)
|
||||
|
||||
in sequenceOf
|
|
@ -1,15 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves string from a `Text` value
|
||||
-}
|
||||
let Preserves/Type = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let string
|
||||
: Text → Preserves/Type
|
||||
= λ(x : Text) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.string x
|
||||
|
||||
in string
|
|
@ -1,15 +0,0 @@
|
|||
{-|
|
||||
Create a Preserves symbol from a `Text` value
|
||||
-}
|
||||
let Preserves/Type = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let symbol
|
||||
: Text → Preserves/Type
|
||||
= λ(x : Text) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.symbol x
|
||||
|
||||
in symbol
|
|
@ -1,6 +0,0 @@
|
|||
((nil . ((eval .
|
||||
(setq tide-tsserver-executable
|
||||
(concat
|
||||
(let ((d (dir-locals-find-file ".")))
|
||||
(if (stringp d) d (car d)))
|
||||
"node_modules/typescript/lib/tsserver.js"))))))
|
|
@ -1,2 +1,4 @@
|
|||
yarn-error.log
|
||||
dist/
|
||||
lib/
|
||||
node_modules/
|
||||
package-lock.json
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
Use yarn, not npm!
|
|
@ -1,6 +0,0 @@
|
|||
{
|
||||
"packages": ["packages/*"],
|
||||
"version": "independent",
|
||||
"npmClient": "yarn",
|
||||
"useWorkspaces": true
|
||||
}
|
|
@ -1,25 +1,34 @@
|
|||
{
|
||||
"name": "@preserves/root",
|
||||
"private": true,
|
||||
"name": "preserves",
|
||||
"version": "0.5.3",
|
||||
"description": "Experimental data serialization format",
|
||||
"homepage": "https://gitlab.com/preserves/preserves",
|
||||
"license": "Apache-2.0",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": "gitlab:preserves/preserves",
|
||||
"main": "dist/preserves.js",
|
||||
"module": "lib/index.js",
|
||||
"types": "lib/index.d.ts",
|
||||
"author": "Tony Garnock-Jones <tonyg@leastfixedpoint.com>",
|
||||
"devDependencies": {
|
||||
"@types/jest": "^26.0.19",
|
||||
"jest": "^26.6.3",
|
||||
"lerna": "^4.0.0",
|
||||
"rollup": "^2.40.0",
|
||||
"rollup": "^2.36.1",
|
||||
"rollup-plugin-terser": "^7.0.2",
|
||||
"ts-jest": "^26.5.2",
|
||||
"ts-node-dev": "^1.1.6",
|
||||
"typescript": "^4.2.3"
|
||||
"ts-jest": "^26.4.4",
|
||||
"ts-node-dev": "^1.1.1",
|
||||
"typescript": "^4.1.3"
|
||||
},
|
||||
"workspaces": [
|
||||
"packages/*"
|
||||
],
|
||||
"scripts": {
|
||||
"prepare": "lerna exec yarn run prepare",
|
||||
"clean": "lerna exec yarn run clean",
|
||||
"veryclean": "yarn run veryclean:local && lerna exec yarn run veryclean",
|
||||
"veryclean:local": "rm -rf node_modules",
|
||||
"build": "lerna exec yarn run prepare",
|
||||
"test": "lerna exec yarn run test"
|
||||
}
|
||||
"clean": "rm -rf lib dist",
|
||||
"prepare": "npx tsc && npx rollup -c",
|
||||
"rollupwatch": "npx rollup -c -w",
|
||||
"test": "npx jest",
|
||||
"testwatch": "npx jest --watch",
|
||||
"veryclean": "npm run clean && rm -rf node_modules",
|
||||
"watch": "npx tsc -w"
|
||||
},
|
||||
"dependencies": {}
|
||||
}
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
dist/
|
||||
lib/
|
|
@ -1 +0,0 @@
|
|||
version-tag-prefix javascript-@preserves/core@
|
|
@ -1,24 +0,0 @@
|
|||
{
|
||||
"name": "@preserves/core",
|
||||
"version": "0.17.0",
|
||||
"description": "Preserves data serialization format",
|
||||
"homepage": "https://gitlab.com/preserves/preserves",
|
||||
"license": "Apache-2.0",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": "gitlab:preserves/preserves",
|
||||
"main": "dist/preserves.js",
|
||||
"module": "lib/index.js",
|
||||
"types": "lib/index.d.ts",
|
||||
"author": "Tony Garnock-Jones <tonyg@leastfixedpoint.com>",
|
||||
"scripts": {
|
||||
"clean": "rm -rf lib dist",
|
||||
"prepare": "tsc && rollup -c",
|
||||
"rollupwatch": "rollup -c -w",
|
||||
"test": "jest",
|
||||
"testwatch": "jest --watch",
|
||||
"veryclean": "yarn run clean && rm -rf node_modules",
|
||||
"watch": "tsc -w"
|
||||
}
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
import { Encoder } from "./encoder";
|
||||
import { Tag } from "./constants";
|
||||
import { AsPreserve, PreserveOn } from "./symbols";
|
||||
import { Value } from "./values";
|
||||
import { is, isAnnotated, IsPreservesAnnotated } from "./is";
|
||||
import { stringify } from "./text";
|
||||
import { GenericEmbedded } from "./embedded";
|
||||
|
||||
export interface Position {
|
||||
line?: number;
|
||||
column?: number;
|
||||
pos: number;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export function newPosition(name?: string): Position {
|
||||
return { line: 1, column: 0, pos: 0, name };
|
||||
}
|
||||
|
||||
export function updatePosition(p: Position, ch: string): boolean {
|
||||
p.pos++;
|
||||
if (p.line === void 0) {
|
||||
return false;
|
||||
} else {
|
||||
let advancedLine = false;
|
||||
switch (ch) {
|
||||
case '\t':
|
||||
p.column = (p.column! + 8) & ~7;
|
||||
break;
|
||||
case '\n':
|
||||
p.column = 0;
|
||||
p.line++;
|
||||
advancedLine = true;
|
||||
break;
|
||||
case '\r':
|
||||
p.column = 0;
|
||||
break;
|
||||
default:
|
||||
p.column!++;
|
||||
break;
|
||||
}
|
||||
return advancedLine;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatPosition(p: Position | null | string): string {
|
||||
if (p === null) {
|
||||
return '<unknown>';
|
||||
} else if (typeof p === 'string') {
|
||||
return p;
|
||||
} else {
|
||||
return `${p.name ?? ''}:${p.line ?? ''}:${p.column ?? ''}:${p.pos}`;
|
||||
}
|
||||
}
|
||||
|
||||
export class Annotated<T = GenericEmbedded> {
|
||||
readonly annotations: Array<Value<T>>;
|
||||
readonly pos: Position | null;
|
||||
readonly item: Value<T>;
|
||||
|
||||
constructor(item: Value<T>, pos?: Position) {
|
||||
this.annotations = [];
|
||||
this.pos = pos ?? null;
|
||||
this.item = item;
|
||||
}
|
||||
|
||||
[AsPreserve](): Value<T> {
|
||||
return this;
|
||||
}
|
||||
|
||||
[PreserveOn](encoder: Encoder<T>) {
|
||||
if (encoder.includeAnnotations) {
|
||||
for (const a of this.annotations) {
|
||||
encoder.state.emitbyte(Tag.Annotation);
|
||||
encoder.push(a);
|
||||
}
|
||||
}
|
||||
encoder.push(this.item);
|
||||
}
|
||||
|
||||
equals(other: any): boolean {
|
||||
return is(this.item, Annotated.isAnnotated(other) ? other.item : other);
|
||||
}
|
||||
|
||||
// hashCode(): number {
|
||||
// return hash(this.item);
|
||||
// }
|
||||
|
||||
toString(): string {
|
||||
return this.asPreservesText();
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
const anns = this.annotations.map((a) => '@' + stringify(a)).join(' ');
|
||||
return (anns ? anns + ' ' : anns) + stringify(this.item);
|
||||
}
|
||||
|
||||
get [IsPreservesAnnotated](): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
static isAnnotated<T = GenericEmbedded>(x: any): x is Annotated<T> {
|
||||
return isAnnotated(x);
|
||||
}
|
||||
}
|
||||
|
||||
export function annotate<T = GenericEmbedded>(v0: Value<T>, ...anns: Value<T>[]): Annotated<T> {
|
||||
const v = Annotated.isAnnotated<T>(v0) ? v0 : new Annotated(v0);
|
||||
anns.forEach((a) => v.annotations.push(a));
|
||||
return v;
|
||||
}
|
||||
|
||||
export function annotations<T = GenericEmbedded>(v: Value<T>): Array<Value<T>> {
|
||||
return Annotated.isAnnotated<T>(v) ? v.annotations : [];
|
||||
}
|
||||
|
||||
export function position<T = GenericEmbedded>(v: Value<T>): Position | null {
|
||||
return Annotated.isAnnotated<T>(v) ? v.pos : null;
|
||||
}
|
|
@ -1,52 +0,0 @@
|
|||
// Preserves Binary codec.
|
||||
|
||||
import { Position } from "./annotated";
|
||||
|
||||
export type ErrorType = 'DecodeError' | 'EncodeError' | 'ShortPacket';
|
||||
export const ErrorType = Symbol.for('ErrorType');
|
||||
|
||||
export abstract class PreservesCodecError {
|
||||
abstract get [ErrorType](): ErrorType;
|
||||
|
||||
static isCodecError(e: any, t: ErrorType): e is PreservesCodecError {
|
||||
return (e?.[ErrorType] === t);
|
||||
}
|
||||
}
|
||||
|
||||
export class DecodeError extends Error {
|
||||
readonly pos: Position | undefined;
|
||||
|
||||
get [ErrorType](): ErrorType { return 'DecodeError' }
|
||||
|
||||
constructor(message: string, pos?: Position) {
|
||||
super(message);
|
||||
this.pos = pos;
|
||||
}
|
||||
|
||||
static isDecodeError(e: any): e is DecodeError {
|
||||
return PreservesCodecError.isCodecError(e, 'DecodeError');
|
||||
}
|
||||
}
|
||||
|
||||
export class EncodeError extends Error {
|
||||
get [ErrorType](): ErrorType { return 'EncodeError' }
|
||||
|
||||
static isEncodeError(e: any): e is EncodeError {
|
||||
return PreservesCodecError.isCodecError(e, 'EncodeError');
|
||||
}
|
||||
|
||||
readonly irritant: any;
|
||||
|
||||
constructor(message: string, irritant: any) {
|
||||
super(message);
|
||||
this.irritant = irritant;
|
||||
}
|
||||
}
|
||||
|
||||
export class ShortPacket extends DecodeError {
|
||||
get [ErrorType](): ErrorType { return 'ShortPacket' }
|
||||
|
||||
static isShortPacket(e: any): e is ShortPacket {
|
||||
return PreservesCodecError.isCodecError(e, 'ShortPacket');
|
||||
}
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
import type { Compound, Value } from "./values";
|
||||
import type { GenericEmbedded } from "./embedded";
|
||||
import { Dictionary, Set } from "./dictionary";
|
||||
|
||||
export function isCompound<T = GenericEmbedded>(x: Value<T>): x is Compound<T>
|
||||
{
|
||||
return (Array.isArray(x) || Set.isSet(x) || Dictionary.isDictionary(x));
|
||||
}
|
|
@ -1,385 +0,0 @@
|
|||
import { Annotated } from "./annotated";
|
||||
import { DecodeError, ShortPacket } from "./codec";
|
||||
import { Tag } from "./constants";
|
||||
import { Set, Dictionary } from "./dictionary";
|
||||
import { DoubleFloat, SingleFloat } from "./float";
|
||||
import { Record } from "./record";
|
||||
import { Bytes, BytesLike, underlying } from "./bytes";
|
||||
import { Value } from "./values";
|
||||
import { is } from "./is";
|
||||
import { embed, GenericEmbedded, Embedded, EmbeddedTypeDecode } from "./embedded";
|
||||
import { ReaderStateOptions } from "reader";
|
||||
|
||||
export interface DecoderOptions {
|
||||
includeAnnotations?: boolean;
|
||||
}
|
||||
|
||||
export interface DecoderEmbeddedOptions<T> extends DecoderOptions {
|
||||
embeddedDecode?: EmbeddedTypeDecode<T>;
|
||||
}
|
||||
|
||||
export interface TypedDecoder<T> {
|
||||
atEnd(): boolean;
|
||||
|
||||
mark(): any;
|
||||
restoreMark(m: any): void;
|
||||
|
||||
skip(): void;
|
||||
next(): Value<T>;
|
||||
withEmbeddedDecode<S, R>(
|
||||
embeddedDecode: EmbeddedTypeDecode<S>,
|
||||
body: (d: TypedDecoder<S>) => R): R;
|
||||
|
||||
nextBoolean(): boolean | undefined;
|
||||
nextFloat(): SingleFloat | undefined;
|
||||
nextDouble(): DoubleFloat | undefined;
|
||||
nextEmbedded(): Embedded<T> | undefined;
|
||||
nextSignedInteger(): number | undefined;
|
||||
nextString(): string | undefined;
|
||||
nextByteString(): Bytes | undefined;
|
||||
nextSymbol(): symbol | undefined;
|
||||
|
||||
openRecord(): boolean;
|
||||
openSequence(): boolean;
|
||||
openSet(): boolean;
|
||||
openDictionary(): boolean;
|
||||
|
||||
closeCompound(): boolean;
|
||||
}
|
||||
|
||||
export function asLiteral<T, E extends Exclude<Value<T>, Annotated<T>>>(
|
||||
actual: Value<T>,
|
||||
expected: E): E | undefined
|
||||
{
|
||||
return is(actual, expected) ? expected : void 0;
|
||||
}
|
||||
|
||||
export class DecoderState {
|
||||
packet: Uint8Array;
|
||||
index = 0;
|
||||
options: DecoderOptions;
|
||||
|
||||
constructor(packet: BytesLike, options: DecoderOptions) {
|
||||
this.packet = underlying(packet);
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
get includeAnnotations(): boolean {
|
||||
return this.options.includeAnnotations ?? false;
|
||||
}
|
||||
|
||||
write(data: BytesLike) {
|
||||
if (this.index === this.packet.length) {
|
||||
this.packet = underlying(data);
|
||||
} else {
|
||||
this.packet = Bytes.concat([this.packet.slice(this.index), data])._view;
|
||||
}
|
||||
this.index = 0;
|
||||
}
|
||||
|
||||
atEnd(): boolean {
|
||||
return this.index >= this.packet.length;
|
||||
}
|
||||
|
||||
mark(): number {
|
||||
return this.index;
|
||||
}
|
||||
|
||||
restoreMark(m: number): void {
|
||||
this.index = m;
|
||||
}
|
||||
|
||||
shortGuard<R>(body: () => R, short: () => R): R {
|
||||
if (this.atEnd()) return short();
|
||||
// ^ important somewhat-common case optimization - avoid the exception
|
||||
|
||||
const start = this.mark();
|
||||
try {
|
||||
return body();
|
||||
} catch (e) {
|
||||
if (ShortPacket.isShortPacket(e)) {
|
||||
this.restoreMark(start);
|
||||
return short();
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
nextbyte(): number {
|
||||
if (this.atEnd()) throw new ShortPacket("Short packet");
|
||||
return this.packet[this.index++];
|
||||
}
|
||||
|
||||
nextbytes(n: number): DataView {
|
||||
const start = this.index;
|
||||
this.index += n;
|
||||
if (this.index > this.packet.length) throw new ShortPacket("Short packet");
|
||||
// ^ NOTE: greater-than, not greater-than-or-equal-to - this makes atEnd() inappropriate
|
||||
return new DataView(this.packet.buffer, this.packet.byteOffset + start, n);
|
||||
}
|
||||
|
||||
varint(): number {
|
||||
// TODO: Bignums :-/
|
||||
const v = this.nextbyte();
|
||||
if (v < 128) return v;
|
||||
return (this.varint() << 7) + (v - 128);
|
||||
}
|
||||
|
||||
peekend(): boolean {
|
||||
return (this.nextbyte() === Tag.End) || (this.index--, false);
|
||||
}
|
||||
|
||||
nextint(n: number): number {
|
||||
// TODO: Bignums :-/
|
||||
if (n === 0) return 0;
|
||||
let acc = this.nextbyte();
|
||||
if (acc & 0x80) acc -= 256;
|
||||
for (let i = 1; i < n; i++) acc = (acc * 256) + this.nextbyte();
|
||||
return acc;
|
||||
}
|
||||
|
||||
nextSmallOrMediumInteger(tag: number): number | undefined {
|
||||
if (tag >= Tag.SmallInteger_lo && tag <= Tag.SmallInteger_lo + 15) {
|
||||
const v = tag - Tag.SmallInteger_lo;
|
||||
return v > 12 ? v - 16 : v;
|
||||
}
|
||||
if (tag >= Tag.MediumInteger_lo && tag <= Tag.MediumInteger_lo + 15) {
|
||||
const n = tag - Tag.MediumInteger_lo;
|
||||
return this.nextint(n + 1);
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
|
||||
wrap<T>(v: Value<T>): Value<T> {
|
||||
return this.includeAnnotations ? new Annotated(v) : v;
|
||||
}
|
||||
|
||||
unshiftAnnotation<T>(a: Value<T>, v: Annotated<T>): Annotated<T> {
|
||||
if (this.includeAnnotations) {
|
||||
v.annotations.unshift(a);
|
||||
}
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
export const neverEmbeddedTypeDecode: EmbeddedTypeDecode<never> = {
|
||||
decode(_s: DecoderState): never {
|
||||
throw new Error("Embeddeds not permitted at this point in Preserves document");
|
||||
},
|
||||
|
||||
fromValue(_v: Value<GenericEmbedded>, _options: ReaderStateOptions): never {
|
||||
throw new Error("Embeddeds not permitted at this point in Preserves document");
|
||||
},
|
||||
};
|
||||
|
||||
export class Decoder<T = never> implements TypedDecoder<T> {
|
||||
state: DecoderState;
|
||||
embeddedDecode: EmbeddedTypeDecode<T>;
|
||||
|
||||
constructor(state: DecoderState, embeddedDecode?: EmbeddedTypeDecode<T>);
|
||||
constructor(packet?: BytesLike, options?: DecoderEmbeddedOptions<T>);
|
||||
constructor(
|
||||
packet_or_state: (DecoderState | BytesLike) = new Uint8Array(0),
|
||||
options_or_embeddedDecode?: (DecoderEmbeddedOptions<T> | EmbeddedTypeDecode<T>))
|
||||
{
|
||||
if (packet_or_state instanceof DecoderState) {
|
||||
this.state = packet_or_state;
|
||||
this.embeddedDecode = (options_or_embeddedDecode as EmbeddedTypeDecode<T>) ?? neverEmbeddedTypeDecode;
|
||||
} else {
|
||||
const options = (options_or_embeddedDecode as DecoderEmbeddedOptions<T>) ?? {};
|
||||
this.state = new DecoderState(packet_or_state, options);
|
||||
this.embeddedDecode = options.embeddedDecode ?? neverEmbeddedTypeDecode;
|
||||
}
|
||||
}
|
||||
|
||||
write(data: BytesLike) {
|
||||
this.state.write(data);
|
||||
}
|
||||
|
||||
nextvalues(): Value<T>[] {
|
||||
const result = [];
|
||||
while (!this.state.peekend()) result.push(this.next());
|
||||
return result;
|
||||
}
|
||||
|
||||
static dictionaryFromArray<T>(vs: Value<T>[]): Dictionary<T> {
|
||||
const d = new Dictionary<T>();
|
||||
if (vs.length % 2) throw new DecodeError("Missing dictionary value");
|
||||
for (let i = 0; i < vs.length; i += 2) {
|
||||
d.set(vs[i], vs[i+1]);
|
||||
}
|
||||
return d;
|
||||
}
|
||||
|
||||
next(): Value<T> {
|
||||
const tag = this.state.nextbyte();
|
||||
switch (tag) {
|
||||
case Tag.False: return this.state.wrap<T>(false);
|
||||
case Tag.True: return this.state.wrap<T>(true);
|
||||
case Tag.Float: return this.state.wrap<T>(new SingleFloat(this.state.nextbytes(4).getFloat32(0, false)));
|
||||
case Tag.Double: return this.state.wrap<T>(new DoubleFloat(this.state.nextbytes(8).getFloat64(0, false)));
|
||||
case Tag.End: throw new DecodeError("Unexpected Compound end marker");
|
||||
case Tag.Annotation: {
|
||||
const a = this.next();
|
||||
const v = this.next() as Annotated<T>;
|
||||
return this.state.unshiftAnnotation(a, v);
|
||||
}
|
||||
case Tag.Embedded: return this.state.wrap<T>(embed(this.embeddedDecode.decode(this.state)));
|
||||
case Tag.SignedInteger: return this.state.wrap<T>(this.state.nextint(this.state.varint()));
|
||||
case Tag.String: return this.state.wrap<T>(Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8());
|
||||
case Tag.ByteString: return this.state.wrap<T>(Bytes.from(this.state.nextbytes(this.state.varint())));
|
||||
case Tag.Symbol: return this.state.wrap<T>(Symbol.for(Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8()));
|
||||
case Tag.Record: {
|
||||
const vs = this.nextvalues();
|
||||
if (vs.length === 0) throw new DecodeError("Too few elements in encoded record");
|
||||
return this.state.wrap<T>(Record(vs[0], vs.slice(1)));
|
||||
}
|
||||
case Tag.Sequence: return this.state.wrap<T>(this.nextvalues());
|
||||
case Tag.Set: return this.state.wrap<T>(new Set(this.nextvalues()));
|
||||
case Tag.Dictionary: return this.state.wrap<T>(Decoder.dictionaryFromArray(this.nextvalues()));
|
||||
default: {
|
||||
const v = this.state.nextSmallOrMediumInteger(tag);
|
||||
if (v === void 0) {
|
||||
throw new DecodeError("Unsupported Preserves tag: " + tag);
|
||||
}
|
||||
return this.state.wrap<T>(v);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try_next(): Value<T> | undefined {
|
||||
return this.state.shortGuard(() => this.next(), () => void 0);
|
||||
}
|
||||
|
||||
atEnd(): boolean {
|
||||
return this.state.atEnd();
|
||||
}
|
||||
|
||||
mark(): any {
|
||||
return this.state.mark();
|
||||
}
|
||||
|
||||
restoreMark(m: any): void {
|
||||
this.state.restoreMark(m);
|
||||
}
|
||||
|
||||
skip(): void {
|
||||
// TODO: be more efficient
|
||||
this.next();
|
||||
}
|
||||
|
||||
withEmbeddedDecode<S, R>(
|
||||
embeddedDecode: EmbeddedTypeDecode<S>,
|
||||
body: (d: TypedDecoder<S>) => R): R
|
||||
{
|
||||
return body(new Decoder(this.state, embeddedDecode));
|
||||
}
|
||||
|
||||
skipAnnotations(): void {
|
||||
if (!this.state.atEnd() && this.state.packet[this.state.index] === Tag.Annotation) {
|
||||
this.state.index++;
|
||||
this.skip();
|
||||
}
|
||||
}
|
||||
|
||||
nextBoolean(): boolean | undefined {
|
||||
this.skipAnnotations();
|
||||
switch (this.state.nextbyte()) {
|
||||
case Tag.False: return false;
|
||||
case Tag.True: return true;
|
||||
default: return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
nextFloat(): SingleFloat | undefined {
|
||||
this.skipAnnotations();
|
||||
switch (this.state.nextbyte()) {
|
||||
case Tag.Float: return new SingleFloat(this.state.nextbytes(4).getFloat32(0, false));
|
||||
default: return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
nextDouble(): DoubleFloat | undefined {
|
||||
this.skipAnnotations();
|
||||
switch (this.state.nextbyte()) {
|
||||
case Tag.Double: return new DoubleFloat(this.state.nextbytes(8).getFloat64(0, false));
|
||||
default: return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
nextEmbedded(): Embedded<T> | undefined {
|
||||
this.skipAnnotations();
|
||||
switch (this.state.nextbyte()) {
|
||||
case Tag.Embedded: return embed(this.embeddedDecode.decode(this.state));
|
||||
default: return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
nextSignedInteger(): number | undefined {
|
||||
this.skipAnnotations();
|
||||
const b = this.state.nextbyte();
|
||||
switch (b) {
|
||||
case Tag.SignedInteger: return this.state.nextint(this.state.varint());
|
||||
default: return this.state.nextSmallOrMediumInteger(b);
|
||||
}
|
||||
}
|
||||
|
||||
nextString(): string | undefined {
|
||||
this.skipAnnotations();
|
||||
switch (this.state.nextbyte()) {
|
||||
case Tag.String: return Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8();
|
||||
default: return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
nextByteString(): Bytes | undefined {
|
||||
this.skipAnnotations();
|
||||
switch (this.state.nextbyte()) {
|
||||
case Tag.ByteString: return Bytes.from(this.state.nextbytes(this.state.varint()));
|
||||
default: return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
nextSymbol(): symbol | undefined {
|
||||
this.skipAnnotations();
|
||||
switch (this.state.nextbyte()) {
|
||||
case Tag.Symbol:
|
||||
return Symbol.for(Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8());
|
||||
default:
|
||||
return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
openRecord(): boolean {
|
||||
this.skipAnnotations();
|
||||
return (this.state.nextbyte() === Tag.Record) || (this.state.index--, false);
|
||||
}
|
||||
|
||||
openSequence(): boolean {
|
||||
this.skipAnnotations();
|
||||
return (this.state.nextbyte() === Tag.Sequence) || (this.state.index--, false);
|
||||
}
|
||||
|
||||
openSet(): boolean {
|
||||
this.skipAnnotations();
|
||||
return (this.state.nextbyte() === Tag.Set) || (this.state.index--, false);
|
||||
}
|
||||
|
||||
openDictionary(): boolean {
|
||||
this.skipAnnotations();
|
||||
return (this.state.nextbyte() === Tag.Dictionary) || (this.state.index--, false);
|
||||
}
|
||||
|
||||
closeCompound(): boolean {
|
||||
return this.state.peekend();
|
||||
}
|
||||
}
|
||||
|
||||
export function decode<T>(bs: BytesLike, options: DecoderEmbeddedOptions<T> = {}): Value<T> {
|
||||
return new Decoder(bs, options).next();
|
||||
}
|
||||
|
||||
export function decodeWithAnnotations<T>(bs: BytesLike,
|
||||
options: DecoderEmbeddedOptions<T> = {}): Annotated<T> {
|
||||
return decode(bs, { ... options, includeAnnotations: true }) as Annotated<T>;
|
||||
}
|
|
@ -1,137 +0,0 @@
|
|||
import { Encoder, canonicalEncode, canonicalString } from "./encoder";
|
||||
import { Tag } from "./constants";
|
||||
import { FlexMap, FlexSet, _iterMap } from "./flex";
|
||||
import { PreserveOn } from "./symbols";
|
||||
import { stringify } from "./text";
|
||||
import { Value } from "./values";
|
||||
import { Bytes } from './bytes';
|
||||
import { GenericEmbedded } from "./embedded";
|
||||
|
||||
export type DictionaryType = 'Dictionary' | 'Set';
|
||||
export const DictionaryType = Symbol.for('DictionaryType');
|
||||
|
||||
export class KeyedDictionary<K extends Value<T>, V, T = GenericEmbedded> extends FlexMap<K, V> {
|
||||
get [DictionaryType](): DictionaryType {
|
||||
return 'Dictionary';
|
||||
}
|
||||
|
||||
static isKeyedDictionary<K extends Value<T>, V, T = GenericEmbedded>(x: any): x is KeyedDictionary<K, V, T> {
|
||||
return x?.[DictionaryType] === 'Dictionary';
|
||||
}
|
||||
|
||||
constructor(items?: readonly [K, V][]);
|
||||
constructor(items?: Iterable<readonly [K, V]>);
|
||||
constructor(items?: Iterable<readonly [K, V]>) {
|
||||
super(canonicalString, items);
|
||||
}
|
||||
|
||||
mapEntries<W, S extends Value<R>, R = GenericEmbedded>(f: (entry: [K, V]) => [S, W]): KeyedDictionary<S, W, R> {
|
||||
const result = new KeyedDictionary<S, W, R>();
|
||||
for (let oldEntry of this.entries()) {
|
||||
const newEntry = f(oldEntry);
|
||||
result.set(newEntry[0], newEntry[1])
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
return '{' +
|
||||
Array.from(_iterMap(this.entries(), ([k, v]) =>
|
||||
stringify(k) + ': ' + stringify(v))).join(', ') +
|
||||
'}';
|
||||
}
|
||||
|
||||
clone(): KeyedDictionary<K, V, T> {
|
||||
return new KeyedDictionary(this);
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.asPreservesText();
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() { return 'Dictionary'; }
|
||||
|
||||
[PreserveOn](encoder: Encoder<T>) {
|
||||
if (encoder.canonical) {
|
||||
const entries = Array.from(this);
|
||||
const pieces = entries.map<[Bytes, number]>(([k, _v], i) => [canonicalEncode(k), i]);
|
||||
pieces.sort((a, b) => Bytes.compare(a[0], b[0]));
|
||||
encoder.state.emitbyte(Tag.Dictionary);
|
||||
pieces.forEach(([_encodedKey, i]) => {
|
||||
const [k, v] = entries[i];
|
||||
encoder.push(k);
|
||||
encoder.push(v as unknown as Value<T>); // Suuuuuuuper unsound
|
||||
});
|
||||
encoder.state.emitbyte(Tag.End);
|
||||
} else {
|
||||
encoder.state.emitbyte(Tag.Dictionary);
|
||||
this.forEach((v, k) => {
|
||||
encoder.push(k);
|
||||
encoder.push(v as unknown as Value<T>); // Suuuuuuuper unsound
|
||||
});
|
||||
encoder.state.emitbyte(Tag.End);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class Dictionary<T = GenericEmbedded, V = Value<T>> extends KeyedDictionary<Value<T>, V, T> {
|
||||
static isDictionary<T = GenericEmbedded, V = Value<T>>(x: any): x is Dictionary<T, V> {
|
||||
return x?.[DictionaryType] === 'Dictionary';
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Set of Preserves Values, membership determined by each element's canonical
 * encoding (canonicalString) rather than JS identity.
 */
export class KeyedSet<K extends Value<T>, T = GenericEmbedded> extends FlexSet<K> {
    // Brand tag distinguishing sets from dictionaries at runtime.
    get [DictionaryType](): DictionaryType {
        return 'Set';
    }

    // Duck-typed brand check (not instanceof).
    static isKeyedSet<K extends Value<T>, T = GenericEmbedded>(x: any): x is KeyedSet<K, T> {
        return x?.[DictionaryType] === 'Set';
    }

    constructor(items?: Iterable<K>) {
        super(canonicalString, items);
    }

    /** New set containing f(element) for every element. */
    map<S extends Value<R>, R = GenericEmbedded>(f: (value: K) => S): KeyedSet<S, R> {
        return new KeyedSet(_iterMap(this[Symbol.iterator](), f));
    }

    /** New set containing only the elements for which f returns true. */
    filter(f: (value: K) => boolean): KeyedSet<K, T> {
        const result = new KeyedSet<K, T>();
        for (let k of this) if (f(k)) result.add(k);
        return result;
    }

    toString(): string {
        return this.asPreservesText();
    }

    /** Preserves text syntax for sets: `#{v1, v2, ...}`. */
    asPreservesText(): string {
        return '#{' +
            Array.from(_iterMap(this.values(), stringify)).join(', ') +
            '}';
    }

    /** Shallow copy. */
    clone(): KeyedSet<K, T> {
        return new KeyedSet(this);
    }

    get [Symbol.toStringTag]() { return 'Set'; }

    // Binary serialization: canonical mode sorts elements by their canonical
    // encoding (sort keys are computed in a separate encoder state).
    [PreserveOn](encoder: Encoder<T>) {
        if (encoder.canonical) {
            const pieces = Array.from(this).map<[Bytes, K]>(k => [canonicalEncode(k), k]);
            pieces.sort((a, b) => Bytes.compare(a[0], b[0]));
            encoder.encodevalues(Tag.Set, pieces.map(e => e[1]));
        } else {
            encoder.encodevalues(Tag.Set, this);
        }
    }
}
|
||||
|
||||
/**
 * The common case of KeyedSet: elements are arbitrary Preserves Values.
 * Note: shadows the global `Set` within this module's consumers.
 */
export class Set<T = GenericEmbedded> extends KeyedSet<Value<T>, T> {
    // Duck-typed brand check via the DictionaryType tag.
    static isSet<T = GenericEmbedded>(x: any): x is Set<T> {
        return x?.[DictionaryType] === 'Set';
    }
}
|
|
@ -1,56 +0,0 @@
|
|||
import type { EncoderState } from "./encoder";
|
||||
import type { DecoderState } from "./decoder";
|
||||
import type { Value } from "./values";
|
||||
import { ReaderStateOptions } from "./reader";
|
||||
|
||||
/** Serialization half of an embedded-value codec for values of type T. */
export type EmbeddedTypeEncode<T> = {
    // Write v into the given encoder state.
    encode(s: EncoderState, v: T): void;
    // Represent v as a plain (non-embedded) Preserves Value.
    toValue(v: T): Value<GenericEmbedded>;
}

/** Deserialization half of an embedded-value codec for values of type T. */
export type EmbeddedTypeDecode<T> = {
    decode(s: DecoderState): T;
    fromValue(v: Value<GenericEmbedded>, options: ReaderStateOptions): T;
}

/** A complete embedded-value codec: both directions. */
export type EmbeddedType<T> = EmbeddedTypeEncode<T> & EmbeddedTypeDecode<T>;
|
||||
|
||||
/**
 * Wrapper marking an application-specific value of type T as embedded in a
 * Preserves Value tree.
 */
export class Embedded<T> {
    embeddedValue: T;

    constructor(embeddedValue: T) {
        this.embeddedValue = embeddedValue;
    }

    // Equal iff `other` is also Embedded and the wrapped values are equal
    // under the supplied equivalence.
    equals(other: any, is: (a: any, b: any) => boolean) {
        return isEmbedded<T>(other) && is(this.embeddedValue, other.embeddedValue);
    }

    // Text syntax `#!<value>`. NOTE(review): assumes the wrapped value itself
    // has an asPreservesText method — TODO confirm for all T used in practice.
    asPreservesText(): string {
        return '#!' + (this.embeddedValue as any).asPreservesText();
    }
}
|
||||
|
||||
/** Convenience constructor: wrap a value as an Embedded. */
export function embed<T>(embeddedValue: T): Embedded<T> {
    return new Embedded(embeddedValue);
}
|
||||
|
||||
export function isEmbedded<T>(v: Value<T>): v is Embedded<T> {
|
||||
return typeof v === 'object' && 'embeddedValue' in v;
|
||||
}
|
||||
|
||||
export class GenericEmbedded {
|
||||
generic: Value;
|
||||
|
||||
constructor(generic: Value) {
|
||||
this.generic = generic;
|
||||
}
|
||||
|
||||
equals(other: any, is: (a: any, b: any) => boolean) {
|
||||
return typeof other === 'object' && 'generic' in other && is(this.generic, other.generic);
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
return this.generic.asPreservesText();
|
||||
}
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
import { GenericEmbedded, EmbeddedType, EmbeddedTypeDecode, EmbeddedTypeEncode } from "./embedded";
|
||||
import { Encoder, EncoderState, identityEmbeddedTypeEncode } from "./encoder";
|
||||
import { genericEmbeddedTypeDecode, ReaderStateOptions } from "./reader";
|
||||
import { Value } from "./values";
|
||||
import { DecoderState, neverEmbeddedTypeDecode } from "./decoder";
|
||||
|
||||
// Encoder half for GenericEmbedded: serialize the wrapped Value in place.
export const genericEmbeddedTypeEncode: EmbeddedTypeEncode<GenericEmbedded> = {
    encode(s: EncoderState, v: GenericEmbedded): void {
        new Encoder(s, this).push(v.generic);
    },

    toValue(v: GenericEmbedded): Value<GenericEmbedded> {
        return v.generic;
    }
};

// Full codec for GenericEmbedded (decode half lives in reader.ts).
export const genericEmbeddedType: EmbeddedType<GenericEmbedded> =
    Object.assign({},
        genericEmbeddedTypeDecode,
        genericEmbeddedTypeEncode);

// Encoder half that forbids embedded values entirely: any attempt throws.
export const neverEmbeddedTypeEncode: EmbeddedTypeEncode<never> = {
    encode(_s: EncoderState, _v: never): void {
        throw new Error("Embeddeds not permitted encoding Preserves document");
    },

    toValue(_v: never): Value<GenericEmbedded> {
        throw new Error("Embeddeds not permitted encoding Preserves document");
    }
};

export const neverEmbeddedType: EmbeddedType<never> =
    Object.assign({},
        neverEmbeddedTypeDecode,
        neverEmbeddedTypeEncode);

// Decode half matching identityEmbeddedTypeEncode: identity ids are
// process-local, so decoding is impossible and always throws.
export const identityEmbeddedTypeDecode: EmbeddedTypeDecode<any> = {
    decode(_s: DecoderState): any {
        throw new Error("Cannot decode identityEmbeddedType");
    },

    fromValue(_v: Value<GenericEmbedded>, _options: ReaderStateOptions): any {
        throw new Error("Cannot decode identityEmbeddedType");
    },
};

export const identityEmbeddedType: EmbeddedType<any> =
    Object.assign({},
        identityEmbeddedTypeDecode,
        identityEmbeddedTypeEncode);
|
|
@ -1,299 +0,0 @@
|
|||
import { Tag } from "./constants";
|
||||
import { Bytes } from "./bytes";
|
||||
import { Value } from "./values";
|
||||
import { PreserveOn } from "./symbols";
|
||||
import { EncodeError } from "./codec";
|
||||
import { Record, Tuple } from "./record";
|
||||
import { GenericEmbedded, EmbeddedTypeEncode } from "./embedded";
|
||||
|
||||
// Anything Encoder.push accepts: a Value, an object that can serialize
// itself, an iterable of Values (emitted as a Sequence), or raw binary.
export type Encodable<T> =
    Value<T> | Preservable<T> | Iterable<Value<T>> | ArrayBufferView;

/** Implemented by objects that know how to write themselves to an Encoder. */
export interface Preservable<T> {
    [PreserveOn](encoder: Encoder<T>): void;
}
|
||||
|
||||
export function isPreservable<T>(v: any): v is Preservable<T> {
|
||||
return typeof v === 'object' && v !== null && typeof v[PreserveOn] === 'function';
|
||||
}
|
||||
|
||||
export interface EncoderOptions {
    // Emit canonical form (defaults to true in EncoderState).
    canonical?: boolean;
    // Whether annotations are serialized (defaults to !canonical).
    includeAnnotations?: boolean;
}

export interface EncoderEmbeddedOptions<T> extends EncoderOptions {
    // Codec used for embedded values; identity-id encoding if omitted.
    embeddedEncode?: EmbeddedTypeEncode<T>;
}
|
||||
|
||||
export function asLatin1(bs: Uint8Array): string {
|
||||
return String.fromCharCode.apply(null, bs as any as number[]);
|
||||
}
|
||||
|
||||
function isIterable<T>(v: any): v is Iterable<T> {
|
||||
return typeof v === 'object' && v !== null && typeof v[Symbol.iterator] === 'function';
|
||||
}
|
||||
|
||||
let _nextId = 0;
|
||||
const _registry = new WeakMap<object, number>();
|
||||
export function embeddedId(v: any): number {
|
||||
let id = _registry.get(v);
|
||||
if (id === void 0) {
|
||||
id = _nextId++;
|
||||
_registry.set(v, id);
|
||||
}
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
 * Encodes embedded values by identity: each distinct object is replaced by
 * its process-local integer id (embeddedId). One-way only — the matching
 * identityEmbeddedTypeDecode throws, since ids carry no portable meaning.
 */
export const identityEmbeddedTypeEncode: EmbeddedTypeEncode<any> = {
    encode(s: EncoderState, v: any): void {
        new Encoder(s, this).push(embeddedId(v));
    },

    toValue(v: any): Value<GenericEmbedded> {
        return embeddedId(v);
    }
};
|
||||
|
||||
/**
 * Low-level output buffer for the binary encoder. Bytes are written into a
 * growable DataView; when it fills, the used prefix is pushed onto `chunks`
 * and a fresh buffer is started ("rotation"). `contents()` finalizes and
 * resets the writer.
 */
export class EncoderState {
    chunks: Array<Uint8Array>;   // completed (rotated-out) segments
    view: DataView;              // current segment being filled
    index: number;               // write offset within `view`
    options: EncoderOptions;

    constructor(options: EncoderOptions) {
        this.chunks = [];
        this.view = new DataView(new ArrayBuffer(256));
        this.index = 0;
        this.options = options;
    }

    // Canonical form is the default.
    get canonical(): boolean {
        return this.options.canonical ?? true;
    }

    // Annotations are dropped in canonical mode unless explicitly requested.
    get includeAnnotations(): boolean {
        return this.options.includeAnnotations ?? !this.canonical;
    }

    /** Returns everything written so far and resets the writer. */
    contents(): Bytes {
        if (this.chunks.length === 0) {
            // Fast path: single segment, copy out the used prefix.
            const resultLength = this.index;
            this.index = 0;
            return new Bytes(this.view.buffer.slice(0, resultLength));
        } else {
            this.rotatebuffer(4096);
            return Bytes.concat(this.chunks);
        }
    }

    /* Like contents(), but hands back a string containing binary data "encoded" via latin-1 */
    contentsString(): string {
        if (this.chunks.length === 0) {
            const s = asLatin1(new Uint8Array(this.view.buffer, 0, this.index));
            this.index = 0;
            return s;
        } else {
            this.rotatebuffer(4096);
            return this.chunks.map(asLatin1).join('');
        }
    }

    // Retire the current segment to `chunks` and start a fresh one of `size`.
    rotatebuffer(size: number) {
        this.chunks.push(new Uint8Array(this.view.buffer, 0, this.index));
        this.view = new DataView(new ArrayBuffer(size));
        this.index = 0;
    }

    // Ensure at least `amount` bytes fit in the current segment.
    makeroom(amount: number) {
        if (this.index + amount > this.view.byteLength) {
            this.rotatebuffer(amount + 4096);
        }
    }

    emitbyte(b: number) {
        this.makeroom(1);
        this.view.setUint8(this.index++, b);
    }

    emitbytes(bs: Uint8Array) {
        this.makeroom(bs.length);
        (new Uint8Array(this.view.buffer)).set(bs, this.index);
        this.index += bs.length;
    }

    // LEB128-style base-128 varint, least-significant group first; avoids
    // bitwise ops so values beyond 2^31 still encode correctly.
    varint(v: number) {
        while (v >= 128) {
            this.emitbyte((v % 128) + 128);
            v = Math.floor(v / 128);
        }
        this.emitbyte(v);
    }

    // Two's-complement big-endian signed integer, minimal byte width.
    // Only reached for values outside the small-integer tag range (see
    // Encoder.push). NOTE(review): relies on float math; precision beyond
    // 2^53 is not handled — see TODO below.
    encodeint(v: number) {
        // TODO: Bignums :-/
        const plain_bitcount = Math.floor(Math.log2(v > 0 ? v : -(1 + v))) + 1;
        const signed_bitcount = plain_bitcount + 1;
        const bytecount = (signed_bitcount + 7) >> 3;
        if (bytecount <= 16) {
            this.emitbyte(Tag.MediumInteger_lo + bytecount - 1);
        } else {
            this.emitbyte(Tag.SignedInteger);
            this.varint(bytecount);
        }
        // Emit bytes most-significant first via recursion.
        const enc = (n: number, x: number) => {
            if (n > 0) {
                enc(n - 1, Math.floor(x / 256));
                this.emitbyte(x & 255);
            }
        };
        enc(bytecount, v);
    }

    // Tag byte, then varint length, then payload (strings/symbols/bytes).
    encodebytes(tag: Tag, bs: Uint8Array) {
        this.emitbyte(tag);
        this.varint(bs.length);
        this.emitbytes(bs);
    }
}
|
||||
|
||||
/**
 * Binary serializer for Preserves Values. Wraps an EncoderState (which owns
 * the output buffer) together with the codec used for embedded values.
 */
export class Encoder<T = object> {
    state: EncoderState;
    embeddedEncode: EmbeddedTypeEncode<T>;

    // Either construct a fresh state from options, or share an existing
    // state (used when switching embedded codecs mid-stream).
    constructor(options: EncoderEmbeddedOptions<T>);
    constructor(state: EncoderState, embeddedEncode?: EmbeddedTypeEncode<T>);
    constructor(
        state_or_options: (EncoderState | EncoderEmbeddedOptions<T>) = {},
        embeddedEncode?: EmbeddedTypeEncode<T>)
    {
        if (state_or_options instanceof EncoderState) {
            this.state = state_or_options;
            this.embeddedEncode = embeddedEncode ?? identityEmbeddedTypeEncode;
        } else {
            this.state = new EncoderState(state_or_options);
            this.embeddedEncode = state_or_options.embeddedEncode ?? identityEmbeddedTypeEncode;
        }
    }

    // Run `body` with a sibling encoder sharing this state but using a
    // different embedded-value codec.
    withEmbeddedEncode<S>(
        embeddedEncode: EmbeddedTypeEncode<S>,
        body: (e: Encoder<S>) => void): this
    {
        body(new Encoder(this.state, embeddedEncode));
        return this;
    }

    get canonical(): boolean {
        return this.state.canonical;
    }

    get includeAnnotations(): boolean {
        return this.state.includeAnnotations;
    }

    contents(): Bytes {
        return this.state.contents();
    }

    contentsString(): string {
        return this.state.contentsString();
    }

    // Generic compound emission: tag byte, each item, End byte.
    encodevalues(tag: Tag, items: Iterable<Value<T>>) {
        this.state.emitbyte(tag);
        for (let i of items) { this.push(i); }
        this.state.emitbyte(Tag.End);
    }

    /**
     * Serializes one value. Dispatch ORDER is significant: self-serializing
     * objects first, then JS primitives, then binary views, then Records
     * (which are Arrays with a label, so must be tested before Array), then
     * plain Arrays/iterables as Sequences, and finally embedded values.
     */
    push(v: Encodable<T>) {
        if (isPreservable<never>(v)) {
            v[PreserveOn](this as unknown as Encoder<never>);
        }
        else if (isPreservable<T>(v)) {
            v[PreserveOn](this);
        }
        else if (typeof v === 'boolean') {
            this.state.emitbyte(v ? Tag.True : Tag.False);
        }
        else if (typeof v === 'number') {
            // -3..12 fit in the one-byte small-integer tag range.
            if (v >= -3 && v <= 12) {
                this.state.emitbyte(Tag.SmallInteger_lo + ((v + 16) & 0xf));
            } else {
                this.state.encodeint(v);
            }
        }
        else if (typeof v === 'string') {
            this.state.encodebytes(Tag.String, new Bytes(v)._view);
        }
        else if (typeof v === 'symbol') {
            // Only registered (Symbol.for) symbols have a portable name.
            const key = Symbol.keyFor(v);
            if (key === void 0) throw new EncodeError("Cannot preserve non-global Symbol", v);
            this.state.encodebytes(Tag.Symbol, new Bytes(key)._view);
        }
        else if (ArrayBuffer.isView(v)) {
            if (v instanceof Uint8Array) {
                this.state.encodebytes(Tag.ByteString, v);
            } else {
                // Reinterpret any other typed-array view as raw bytes.
                const bs = new Uint8Array(v.buffer, v.byteOffset, v.byteLength);
                this.state.encodebytes(Tag.ByteString, bs);
            }
        }
        else if (Record.isRecord<Value<T>, Tuple<Value<T>>, T>(v)) {
            this.state.emitbyte(Tag.Record);
            this.push(v.label);
            for (let i of v) { this.push(i); }
            this.state.emitbyte(Tag.End);
        }
        else if (Array.isArray(v)) {
            this.encodevalues(Tag.Sequence, v);
        }
        else if (isIterable<Value<T>>(v)) {
            this.encodevalues(Tag.Sequence, v as Iterable<Value<T>>);
        }
        else {
            // Anything left is treated as an embedded value.
            this.state.emitbyte(Tag.Embedded);
            this.embeddedEncode.encode(this.state, v.embeddedValue);
        }
        return this; // for chaining
    }
}
|
||||
|
||||
/** One-shot encode: serialize `v` with a fresh Encoder and return the bytes. */
export function encode<T>(
    v: Encodable<T>,
    options: EncoderEmbeddedOptions<T> = {}): Bytes
{
    return new Encoder(options).push(v).contents();
}
|
||||
|
||||
// Shared canonical encoder, reused across calls to avoid per-call allocation.
// `_usingCanonicalEncoder` is a reentrancy latch: when canonicalEncode is
// re-entered (e.g. while sorting dictionary keys during an outer encode), the
// inner call falls back to a fresh encoder instead of corrupting the shared
// one.
const _canonicalEncoder = new Encoder({ canonical: true });
let _usingCanonicalEncoder = false;

/** Encode `v` in canonical form. */
export function canonicalEncode(v: Encodable<never>, options?: EncoderEmbeddedOptions<never>): Bytes;
export function canonicalEncode(v: Encodable<any>, options?: EncoderEmbeddedOptions<any>): Bytes;
export function canonicalEncode(v: any, options?: EncoderEmbeddedOptions<any>): Bytes {
    if (options === void 0 && !_usingCanonicalEncoder) {
        // NOTE(review): not exception-safe — if push() throws, the latch
        // stays set and the shared encoder retains partial output. Consider
        // try/finally plus state reset.
        _usingCanonicalEncoder = true;
        const bs = _canonicalEncoder.push(v).contents();
        _usingCanonicalEncoder = false;
        return bs;
    } else {
        return encode(v, { ... options, canonical: true });
    }
}
|
||||
|
||||
/**
 * Canonical encoding of `v` as a latin-1 "binary string" — used as a
 * structural map/set key elsewhere in the library. Uses the shared encoder
 * when available, with the same reentrancy latch as canonicalEncode.
 */
export function canonicalString(v: Encodable<any>): string {
    if (!_usingCanonicalEncoder) {
        // NOTE(review): same exception-safety caveat as canonicalEncode —
        // a throw from push() leaves the latch set.
        _usingCanonicalEncoder = true;
        const s = _canonicalEncoder.push(v).contentsString();
        _usingCanonicalEncoder = false;
        return s;
    } else {
        return new Encoder({ canonical: true }).push(v).contentsString();
    }
}
|
||||
|
||||
/** Like encode(), but forces annotations to be serialized. */
export function encodeWithAnnotations<T>(v: Encodable<T>,
                                         options: EncoderEmbeddedOptions<T> = {}): Bytes {
    return encode(v, { ... options, includeAnnotations: true });
}
|
|
@ -1,73 +0,0 @@
|
|||
import { embed, GenericEmbedded } from "./embedded";
|
||||
import { Bytes } from "./bytes";
|
||||
import { Record, Tuple } from "./record";
|
||||
import { AsPreserve } from "./symbols";
|
||||
import { Value } from "./values";
|
||||
import { Dictionary, Set } from "./dictionary";
|
||||
|
||||
/**
 * Converts a plain JavaScript value into a Preserves Value.
 *
 * Integers, strings, registered symbols and booleans pass through; arrays,
 * Maps, Sets and binary buffers convert structurally; objects providing
 * [AsPreserve] convert themselves; any other object is assumed to be an
 * embedded value. undefined, functions, bigints, null and non-integer
 * numbers are rejected.
 */
export function fromJS<T = GenericEmbedded>(x: any): Value<T> {
    switch (typeof x) {
        case 'number':
            if (!Number.isInteger(x)) {
                // We require that clients be explicit about integer vs. non-integer types.
                throw new TypeError("Refusing to autoconvert non-integer number to Single or Double");
            }
            // FALL THROUGH
        case 'string':
        case 'symbol':
        case 'boolean':
            return x;

        case 'undefined':
        case 'function':
        case 'bigint':
            break; // unrepresentable -> error below

        case 'object':
            if (x === null) {
                break;
            }
            // Object knows how to convert itself.
            if (typeof x[AsPreserve] === 'function') {
                return x[AsPreserve]();
            }
            if (Record.isRecord<Value<T>, Tuple<Value<T>>, T>(x)) {
                return x;
            }
            if (Array.isArray(x)) {
                return x.map<Value<T>>(fromJS);
            }
            if (ArrayBuffer.isView(x) || x instanceof ArrayBuffer) {
                return Bytes.from(x);
            }
            // NOTE(review): Map.isMap / Set.isSet are not standard JS —
            // presumably added by declaration merging elsewhere; confirm.
            if (Map.isMap(x)) {
                const d = new Dictionary<T>();
                x.forEach((v, k) => d.set(fromJS(k), fromJS(v)));
                return d;
            }
            if (Set.isSet(x)) {
                const s = new Set<T>();
                x.forEach(v => s.add(fromJS(v)));
                return s;
            }
            // Just... assume it's a T.
            return embed(x as T);

        default:
            break;
    }

    throw new TypeError("Cannot represent JavaScript value as Preserves: " + x);
}
|
||||
|
||||
declare module "./dictionary" {
|
||||
namespace Dictionary {
|
||||
export function fromJS<T = GenericEmbedded, V = GenericEmbedded>(x: object): Dictionary<T, Value<V>>;
|
||||
}
|
||||
}
|
||||
|
||||
Dictionary.fromJS = function <T = GenericEmbedded, V = GenericEmbedded>(x: object): Dictionary<T, Value<V>> {
|
||||
if (Dictionary.isDictionary<T, Value<V>>(x)) return x;
|
||||
const d = new Dictionary<T, Value<V>>();
|
||||
Object.entries(x).forEach(([key, value]) => d.set(key, fromJS(value)));
|
||||
return d;
|
||||
};
|
|
@ -1,6 +0,0 @@
|
|||
// Package entry point: re-export the runtime plus the wire-protocol constants.
export * from './runtime';
export * as Constants from './constants';

// Re-export the global Array under this package's namespace. The paired
// value alias and generic type alias keep `Array` usable both as a value
// (Array.isArray) and as a type (Array<T>) for consumers of the package.
const _Array = Array;
type _Array<T> = Array<T>;
export { _Array as Array };
|
|
@ -1,29 +0,0 @@
|
|||
import type { GenericEmbedded } from "./embedded";
|
||||
import type { Annotated } from "./annotated";
|
||||
|
||||
export const IsPreservesAnnotated = Symbol.for('IsPreservesAnnotated');
|
||||
|
||||
export function isAnnotated<T = GenericEmbedded>(x: any): x is Annotated<T>
|
||||
{
|
||||
return !!x?.[IsPreservesAnnotated];
|
||||
}
|
||||
|
||||
export function is(a: any, b: any): boolean {
|
||||
if (isAnnotated(a)) a = a.item;
|
||||
if (isAnnotated(b)) b = b.item;
|
||||
if (Object.is(a, b)) return true;
|
||||
if (typeof a !== typeof b) return false;
|
||||
if (typeof a === 'object') {
|
||||
if (a === null || b === null) return false;
|
||||
if ('equals' in a && typeof a.equals === 'function') return a.equals(b, is);
|
||||
if (Array.isArray(a) && Array.isArray(b)) {
|
||||
const isRecord = 'label' in a;
|
||||
if (isRecord !== 'label' in b) return false;
|
||||
if (isRecord && !is((a as any).label, (b as any).label)) return false;
|
||||
if (a.length !== b.length) return false;
|
||||
for (let i = 0; i < a.length; i++) if (!is(a[i], b[i])) return false;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
|
@ -1,75 +0,0 @@
|
|||
import { Record, Tuple } from "./record";
|
||||
import { Bytes } from "./bytes";
|
||||
import { fold } from "./fold";
|
||||
import { is } from "./is";
|
||||
import { Value } from "./values";
|
||||
import { Set, Dictionary } from "./dictionary";
|
||||
import { Annotated } from "./annotated";
|
||||
import { unannotate } from "./strip";
|
||||
import { embed, isEmbedded, Embedded } from "./embedded";
|
||||
|
||||
/**
 * Structurally merges two or more Values, throwing when they conflict.
 * Atoms must be equal; records merge label and fields; arrays merge
 * element-wise (the longer side's tail is kept); dictionaries take the
 * union, merging values on key collision; sets never merge; embedded
 * values are merged via the caller-supplied `mergeEmbeddeds` (returning
 * undefined signals a conflict).
 */
export function merge<T>(
    mergeEmbeddeds: (a: T, b: T) => T | undefined,
    item0: Value<T>,
    ... items: Array<Value<T>>): Value<T>
{
    function die(): never {
        throw new Error("Cannot merge items");
    }

    // Merge a single pair; dispatches on the shape of `a` via fold.
    function walk(a: Value<T>, b: Value<T>): Value<T> {
        if (a === b) return a;
        return fold<T, Value<T>>(a, {
            // Unequal atoms of the same shape cannot merge (equal ones were
            // caught by the a === b / is() checks).
            boolean: die,
            single(_f: number) { return is(a, b) ? a : die(); },
            double(_f: number) { return is(a, b) ? a : die(); },
            integer: die,
            string: die,
            bytes(_b: Bytes) { return is(a, b) ? a : die(); },
            symbol: die,

            record(r: Record<Value<T>, Tuple<Value<T>>, T>) {
                if (!Record.isRecord<Value<T>, Tuple<Value<T>>, T>(b)) die();
                return Record(walk(r.label, b.label), walkMany(r, b));
            },
            array(a: Array<Value<T>>) {
                // Records are arrays too, so exclude them here.
                if (!Array.isArray(b) || Record.isRecord(b)) die();
                return walkMany(a, b);
            },
            set(_s: Set<T>) { die(); },
            dictionary(d: Dictionary<T>) {
                if (!Dictionary.isDictionary<T>(b)) die();
                const r = new Dictionary<T>();
                // Shared keys merge values; keys unique to either side pass through.
                d.forEach((av,ak) => {
                    const bv = b.get(ak);
                    r.set(ak, bv === void 0 ? av : walk(av, bv));
                });
                b.forEach((bv, bk) => {
                    if (!d.has(bk)) r.set(bk, bv);
                });
                return r;
            },

            // Annotations on either side are stripped before merging.
            annotated(a: Annotated<T>) {
                return walk(a, unannotate(b));
            },

            embedded(t: Embedded<T>) {
                if (!isEmbedded<T>(b)) die();
                const r = mergeEmbeddeds(t.embeddedValue, b.embeddedValue);
                if (r === void 0) die();
                return embed(r);
            },
        });
    }

    // Element-wise merge; the longer array's unmatched tail is preserved.
    function walkMany(a: Array<Value<T>>, b: Array<Value<T>>): Array<Value<T>> {
        if (a.length <= b.length) {
            return b.map((bb, i) => (i < a.length) ? walk(a[i], bb) : bb);
        } else {
            return a.map((aa, i) => (i < b.length) ? walk(aa, b[i]) : aa);
        }
    }

    return items.reduce(walk, item0);
}
|
|
@ -1,480 +0,0 @@
|
|||
// Text syntax reader.
|
||||
|
||||
import type { Value } from './values';
|
||||
import { DecodeError, ShortPacket } from './codec';
|
||||
import { Dictionary, Set } from './dictionary';
|
||||
import { strip, unannotate } from './strip';
|
||||
import { Bytes, unhexDigit } from './bytes';
|
||||
import { decode, Decoder, DecoderState, neverEmbeddedTypeDecode } from './decoder';
|
||||
import { Record } from './record';
|
||||
import { Annotated, newPosition, Position, updatePosition } from './annotated';
|
||||
import { Double, DoubleFloat, Single, SingleFloat } from './float';
|
||||
import { stringify } from './text';
|
||||
import { embed, GenericEmbedded, EmbeddedTypeDecode } from './embedded';
|
||||
|
||||
export interface ReaderStateOptions {
    // Keep annotations/comments on parsed values (default false).
    includeAnnotations?: boolean;
    // Source name, or a starting Position, for error reporting.
    name?: string | Position;
}

export interface ReaderOptions<T> extends ReaderStateOptions {
    // Codec used for `#!` embedded values; rejects them if omitted.
    embeddedDecode?: EmbeddedTypeDecode<T>;
}

// The number parser threads a kind tag ('int' vs 'float') and the digits
// accumulated so far through continuation functions.
type IntOrFloat = 'int' | 'float';
type Numeric = number | SingleFloat | DoubleFloat;
type IntContinuation = (kind: IntOrFloat, acc: string) => Numeric;
|
||||
|
||||
/**
 * Cursor over a text-syntax input buffer: tracks position for error
 * reporting, supports incremental feeding of data via write(), and provides
 * the low-level lexing routines (numbers, strings, hex/base64 binaries).
 */
export class ReaderState {
    buffer: string;          // unconsumed input (plus already-read prefix)
    pos: Position;           // human-readable position for diagnostics
    index: number;           // cursor into `buffer`
    discarded = 0;           // count of chars dropped by previous write()s
    options: ReaderStateOptions;

    constructor(buffer: string, options: ReaderStateOptions) {
        this.buffer = buffer;
        switch (typeof options.name) {
            case 'undefined': this.pos = newPosition(); break;
            case 'string': this.pos = newPosition(options.name); break;
            case 'object': this.pos = { ... options.name }; break;
        }
        this.index = 0;
        this.options = options;
    }

    error(message: string, pos: Position): never {
        throw new DecodeError(message, { ... pos });
    }

    get includeAnnotations(): boolean {
        return this.options.includeAnnotations ?? false;
    }

    copyPos(): Position {
        return { ... this.pos };
    }

    // Appends more input, discarding the already-consumed prefix.
    write(data: string) {
        if (this.atEnd()) {
            this.buffer = data;
        } else {
            this.buffer = this.buffer.substr(this.index) + data;
        }
        this.discarded += this.index;
        this.index = 0;
    }

    atEnd(): boolean {
        return (this.index >= this.buffer.length);
    }

    // Look at the next char without consuming; ShortPacket signals that more
    // input is needed (incremental parsing).
    peek(): string {
        if (this.atEnd()) throw new ShortPacket("Short term", this.pos);
        return this.buffer[this.index];
    }

    advance(): number {
        const n = this.index++;
        updatePosition(this.pos, this.buffer[n]);
        return n;
    }

    nextchar(): string {
        if (this.atEnd()) throw new ShortPacket("Short term", this.pos);
        return this.buffer[this.advance()];
    }

    nextcharcode(): number {
        if (this.atEnd()) throw new ShortPacket("Short term", this.pos);
        return this.buffer.charCodeAt(this.advance());
    }

    skipws() {
        while (true) {
            if (this.atEnd()) break;
            if (!isSpace(this.peek())) break;
            this.advance();
        }
    }

    // Two hex digits -> one byte.
    readHex2(): number {
        const x1 = unhexDigit(this.nextcharcode());
        const x2 = unhexDigit(this.nextcharcode());
        return (x1 << 4) | x2;
    }

    // Four hex digits -> one UTF-16 code unit (for \uXXXX escapes).
    readHex4(): number {
        const x1 = unhexDigit(this.nextcharcode());
        const x2 = unhexDigit(this.nextcharcode());
        const x3 = unhexDigit(this.nextcharcode());
        const x4 = unhexDigit(this.nextcharcode());
        return (x1 << 12) | (x2 << 8) | (x3 << 4) | x4;
    }

    // Body of #x"..."; whitespace between byte pairs is permitted.
    readHexBinary(): Bytes {
        const acc: number[] = [];
        while (true) {
            this.skipws();
            if (this.peek() === '"') {
                this.advance();
                return Bytes.from(acc);
            }
            acc.push(this.readHex2());
        }
    }

    // Body of #[...]; whitespace is permitted between base64 chars.
    readBase64Binary(): Bytes {
        let acc = '';
        while (true) {
            this.skipws();
            const c = this.nextchar();
            if (c === ']') break;
            acc = acc + c;
        }
        return decodeBase64(acc);
    }

    // --- Number lexer, written in continuation-passing style so each stage
    // --- can upgrade the kind from 'int' to 'float' as it sees '.', e/E.

    readIntpart(acc: string, ch: string): Numeric {
        if (ch === '0') return this.readFracexp('int', acc + ch);
        return this.readDigit1('int', acc, (kind, acc) => this.readFracexp(kind, acc), ch);
    }

    // Requires at least one digit, then continues with readDigit0.
    readDigit1(kind: IntOrFloat, acc: string, k: IntContinuation, ch?: string): Numeric {
        if (ch === void 0) ch = this.nextchar();
        if (ch >= '0' && ch <= '9') return this.readDigit0(kind, acc + ch, k);
        this.error('Incomplete number', this.pos);
    }

    // Consumes zero or more further digits, then calls the continuation.
    readDigit0(kind: IntOrFloat, acc: string, k: IntContinuation): Numeric {
        while (true) {
            const ch = this.peek();
            if (!(ch >= '0' && ch <= '9')) break;
            this.advance();
            acc = acc + ch;
        }
        return k(kind, acc);
    }

    // Optional fractional part; its presence makes the number a float.
    readFracexp(kind: IntOrFloat, acc: string): Numeric {
        if (this.peek() === '.') {
            this.advance();
            return this.readDigit1('float', acc + '.', (kind, acc) => this.readExp(kind, acc));
        }
        return this.readExp(kind, acc);
    }

    // Optional exponent; its presence makes the number a float.
    readExp(kind: IntOrFloat, acc: string): Numeric {
        const ch = this.peek();
        if (ch === 'e' || ch === 'E') {
            this.advance();
            return this.readSignAndExp(acc + ch);
        }
        return this.finishNumber(kind, acc);
    }

    readSignAndExp(acc: string): Numeric {
        const ch = this.peek();
        if (ch === '+' || ch === '-') {
            this.advance();
            return this.readDigit1('float', acc + ch, (kind, acc) => this.finishNumber(kind, acc));
        }
        return this.readDigit1('float', acc, (kind, acc) => this.finishNumber(kind, acc));
    }

    // Floats take an optional trailing f/F selecting single precision.
    finishNumber(kind: IntOrFloat, acc: string): Numeric {
        const i = parseFloat(acc);
        if (kind === 'int') return i;
        const ch = this.peek();
        if (ch === 'f' || ch === 'F') {
            this.advance();
            return Single(i);
        } else {
            return Double(i);
        }
    }

    // Bare symbol: runs until a delimiter or whitespace.
    readRawSymbol<T>(acc: string): Value<T> {
        while (true) {
            if (this.atEnd()) break;
            const ch = this.peek();
            if (('(){}[]<>";,@#:|'.indexOf(ch) !== -1) || isSpace(ch)) break;
            this.advance();
            acc = acc + ch;
        }
        return Symbol.for(acc);
    }

    // Shared scanner for quoted strings, |symbols| and #"..." binaries:
    // `xform` maps each plain char, `hex` handles the \<hexescape> escape,
    // `finish` assembles the accumulated pieces.
    readStringlike<E, R>(xform: (ch: string) => E,
                         finish: (acc: E[]) => R,
                         terminator: string,
                         hexescape: string,
                         hex: () => E): R
    {
        let acc: E[] = [];
        while (true) {
            const ch = this.nextchar();
            switch (ch) {
                case terminator:
                    return finish(acc);
                case '\\': {
                    const ch = this.nextchar();
                    switch (ch) {
                        case hexescape: acc.push(hex()); break;

                        case terminator:
                        case '\\':
                        case '/':
                            acc.push(xform(ch)); break;

                        case 'b': acc.push(xform('\x08')); break;
                        case 'f': acc.push(xform('\x0c')); break;
                        case 'n': acc.push(xform('\x0a')); break;
                        case 'r': acc.push(xform('\x0d')); break;
                        case 't': acc.push(xform('\x09')); break;

                        default:
                            this.error(`Invalid escape code \\${ch}`, this.pos);
                    }
                    break;
                }
                default:
                    acc.push(xform(ch));
                    break;
            }
        }
    }

    // Quoted string with \uXXXX escapes, including surrogate-pair handling.
    readString(terminator: string): string {
        return this.readStringlike(x => x, xs => xs.join(''), terminator, 'u', () => {
            const n1 = this.readHex4();
            if ((n1 >= 0xd800) && (n1 <= 0xdfff)) {
                // High surrogate must be followed by \u + low surrogate.
                if ((this.nextchar() === '\\') && (this.nextchar() === 'u')) {
                    const n2 = this.readHex4();
                    if ((n2 >= 0xdc00) && (n2 <= 0xdfff) && (n1 <= 0xdbff)) {
                        return String.fromCharCode(n1, n2);
                    }
                }
                this.error('Invalid surrogate pair', this.pos);
            }
            return String.fromCharCode(n1);
        });
    }

    // Body of #"...": chars restricted to 0..255, \xNN escapes allowed.
    readLiteralBinary(): Bytes {
        return this.readStringlike(
            x => {
                const v = x.charCodeAt(0);
                if (v >= 256) this.error(`Invalid code point ${v} in literal binary`, this.pos);
                return v;
            },
            Bytes.from,
            '"',
            'x',
            () => this.readHex2());
    }
}
|
||||
|
||||
// Decoder half for GenericEmbedded: read the embedded payload as an ordinary
// Value and wrap it.
export const genericEmbeddedTypeDecode: EmbeddedTypeDecode<GenericEmbedded> = {
    decode(s: DecoderState): GenericEmbedded {
        return new GenericEmbedded(new Decoder(s, this).next());
    },

    fromValue(v: Value<GenericEmbedded>, options: ReaderStateOptions): GenericEmbedded {
        // Strip annotations unless the caller asked to keep them.
        return new GenericEmbedded(options.includeAnnotations ? v : strip(v));
    },
};
|
||||
|
||||
/**
 * Recursive-descent reader for the Preserves *text* syntax.
 *
 * Parsing position and raw-character primitives live in `ReaderState`
 * (`nextchar`, `peek`, `skipws`, `readIntpart`, `readString`, …); this class
 * supplies the grammar. `T` is the type of embedded (`#!…`) values, decoded
 * via `embeddedType`. When `state.includeAnnotations` is true, every parsed
 * value is wrapped in an `Annotated` carrying its source `Position`.
 */
export class Reader<T> {
    state: ReaderState;                       // incremental input buffer + position
    embeddedType: EmbeddedTypeDecode<T>;      // decoder for `#!` embedded values

    // Either wrap an existing ReaderState (used when recursing with a
    // different embedded type, see the '#!' case in next()), or start
    // fresh from a string buffer.
    constructor(state: ReaderState, embeddedType: EmbeddedTypeDecode<T>);
    constructor(buffer: string, options?: ReaderOptions<T>);
    constructor(
        state_or_buffer: (ReaderState | string) = '',
        embeddedType_or_options?: (EmbeddedTypeDecode<T> | ReaderOptions<T>))
    {
        if (state_or_buffer instanceof ReaderState) {
            this.state = state_or_buffer;
            this.embeddedType = embeddedType_or_options as EmbeddedTypeDecode<T>;
        } else {
            const options = (embeddedType_or_options as ReaderOptions<T>) ?? {};
            this.state = new ReaderState(state_or_buffer, options);
            // With no embedded decoder supplied, any `#!` in the input is an error.
            this.embeddedType = options.embeddedDecode ?? neverEmbeddedTypeDecode;
        }
    }

    // Append more input text (supports incremental/streaming use).
    write(data: string) {
        this.state.write(data);
    }

    // Consumes up to (and including) the next newline, returning the comment
    // text (without the terminator) as a string Value.
    readCommentLine(): Value<T> {
        const startPos = this.state.copyPos();
        let acc = '';
        while (true) {
            const c = this.state.nextchar();
            if (c === '\n' || c === '\r') {
                return this.wrap(acc, startPos);
            }
            acc = acc + c;
        }
    }

    // Attach position information to a freshly parsed value when annotations
    // are being retained; otherwise return it unchanged.
    wrap(v: Value<T>, pos: Position): Value<T> {
        if (this.state.includeAnnotations && !Annotated.isAnnotated(v)) {
            v = new Annotated(v, pos);
        }
        return v;
    }

    // Parse the next value and prepend annotation `v` to it (used for both
    // `@annotation` and `;comment` syntax). Annotations must precede a value,
    // so trailing ones are rejected.
    annotateNextWith(v: Value<T>): Value<T> {
        this.state.skipws();
        if (this.state.atEnd()) {
            throw new DecodeError("Trailing annotations and comments are not permitted",
                                  this.state.pos);
        }
        const u = this.next();
        // When annotations are dropped, `v` is simply discarded.
        if (this.state.includeAnnotations) (u as Annotated<T>).annotations.unshift(v);
        return u;
    }

    // Parse every remaining value in the buffer.
    readToEnd(): Array<Value<T>> {
        const acc = [];
        while (true) {
            this.state.skipws();
            if (this.state.atEnd()) return acc;
            acc.push(this.next());
        }
    }

    // Parse a single complete Value, dispatching on its first character.
    next(): Value<T> {
        this.state.skipws();
        const startPos = this.state.copyPos();
        const unwrapped = ((): Value<T> => {
            const c = this.state.nextchar();
            switch (c) {
                case '-':
                    return this.state.readIntpart('-', this.state.nextchar());
                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                    return this.state.readIntpart('', c);
                case '"':
                    return this.state.readString('"');
                case '|':
                    // |…| is a quoted symbol, sharing string escape rules.
                    return Symbol.for(this.state.readString('|'));
                case ';':
                    return this.annotateNextWith(this.readCommentLine());
                case '@':
                    return this.annotateNextWith(this.next());
                case ':':
                    // NOTE(review): state.error presumably throws (never
                    // returns), so the apparent fall-through into '#' is
                    // unreachable — confirm against ReaderState.error.
                    this.state.error('Unexpected key/value separator between items', startPos);
                case '#': {
                    // '#' introduces booleans, sets, binary literals,
                    // embedded-binary (#=) and embedded values (#!).
                    const c = this.state.nextchar();
                    switch (c) {
                        case 'f': return false;
                        case 't': return true;
                        case '{': return this.seq(new Set<T>(), (v, s) => s.add(v), '}');
                        case '"': return this.state.readLiteralBinary();
                        case 'x':
                            if (this.state.nextchar() !== '"') {
                                this.state.error('Expected open-quote at start of hex ByteString',
                                                 startPos);
                            }
                            return this.state.readHexBinary();
                        case '[': return this.state.readBase64Binary();
                        case '=': {
                            // #= <bytes>: the bytes hold a binary-encoded
                            // Value; decode it in-place with the same options.
                            const bs = unannotate(this.next());
                            if (!Bytes.isBytes(bs)) this.state.error('ByteString must follow #=',
                                                                     startPos);
                            return decode<T>(bs, {
                                embeddedDecode: this.embeddedType,
                                includeAnnotations: this.state.options.includeAnnotations,
                            });
                        }
                        case '!': return embed(this.embeddedType.fromValue(
                            // The embedded value's *description* is a generic
                            // Value, parsed by recursing with a generic reader
                            // over the same underlying state.
                            new Reader<GenericEmbedded>(this.state, genericEmbeddedTypeDecode).next(),
                            this.state.options));
                        default:
                            this.state.error(`Invalid # syntax: ${c}`, startPos);
                    }
                }
                case '<': {
                    // Record: <label field…>
                    const label = this.next();
                    const fields = this.readSequence('>');
                    return Record(label, fields);
                }
                case '[': return this.readSequence(']');
                case '{': return this.readDictionary();
                case '>': this.state.error('Unexpected >', startPos);
                case ']': this.state.error('Unexpected ]', startPos);
                case '}': this.state.error('Unexpected }', startPos);
                default:
                    // Anything else begins a bare symbol.
                    return this.state.readRawSymbol(c);
            }
        })();
        return this.wrap(unwrapped, startPos);
    }

    // Generic delimiter-terminated sequence: repeatedly parse values and fold
    // them into `acc` via `update` until closing character `ch` is seen.
    seq<S>(acc: S, update: (v: Value<T>, acc: S) => void, ch: string): S {
        while (true) {
            this.state.skipws();
            if (this.state.peek() === ch) {
                this.state.advance();
                return acc;
            }
            update(this.next(), acc);
        }
    }

    readSequence(ch: string): Array<Value<T>> {
        return this.seq([] as Array<Value<T>>, (v, acc) => acc.push(v), ch);
    }

    // Dictionary: { key: value … }. Each parsed key must be followed by ':';
    // duplicate keys are a parse error.
    readDictionary(): Dictionary<T> {
        return this.seq(new Dictionary<T>(),
                        (k, acc) => {
                            this.state.skipws();
                            switch (this.state.peek()) {
                                case ':':
                                    if (acc.has(k)) this.state.error(
                                        `Duplicate key: ${stringify(k)}`, this.state.pos);
                                    this.state.advance();
                                    acc.set(k, this.next());
                                    break;
                                default:
                                    this.state.error('Missing key/value separator', this.state.pos);
                            }
                        },
                        '}');
    }
}
|
||||
|
||||
const BASE64: {[key: string]: number} = {};
|
||||
[... 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'].forEach(
|
||||
(c, i) => BASE64[c] = i);
|
||||
BASE64['+'] = BASE64['-'] = 62;
|
||||
BASE64['/'] = BASE64['_'] = 63;
|
||||
|
||||
export function decodeBase64(s: string): Bytes {
|
||||
const bs = new Uint8Array(Math.floor(s.length * 3/4));
|
||||
let i = 0;
|
||||
let j = 0;
|
||||
while (i < s.length) {
|
||||
const v1 = BASE64[s[i++]];
|
||||
const v2 = BASE64[s[i++]];
|
||||
const v3 = BASE64[s[i++]];
|
||||
const v4 = BASE64[s[i++]];
|
||||
const v = (v1 << 18) | (v2 << 12) | (v3 << 6) | v4;
|
||||
bs[j++] = (v >> 16) & 255;
|
||||
if (v3 === void 0) break;
|
||||
bs[j++] = (v >> 8) & 255;
|
||||
if (v4 === void 0) break;
|
||||
bs[j++] = v & 255;
|
||||
}
|
||||
return Bytes.from(bs.subarray(0, j));
|
||||
}
|
||||
|
||||
function isSpace(s: string): boolean {
|
||||
return ' \t\n\r,'.indexOf(s) !== -1;
|
||||
}
|
|
@ -1,101 +0,0 @@
|
|||
import { GenericEmbedded } from "./embedded";
|
||||
import { is } from "./is";
|
||||
import { Value } from "./values";
|
||||
|
||||
// A fixed- or variable-length sequence of T (arrays and tuple types both match).
export type Tuple<T> = Array<T> | [T];

// A Preserves Record is its fields array with an extra `label` property
// attached. `T` is the embedded-value type threaded through Value<T>.
export type Record<LabelType extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>
    = FieldsType & { label: LabelType };

// One accessor function per named field, each taking a whole record `R`.
export type RecordGetters<Fs, R> = {
    [K in string & keyof Fs]: (r: R) => Fs[K];
};

// Positional constructor-argument types derived from the ordered field-name
// tuple `Names`, intersected with any[] so spreads type-check.
export type CtorTypes<Fs, Names extends Tuple<keyof Fs>> =
    { [K in keyof Names]: Fs[keyof Fs & Names[K]] } & any[];

// A callable record constructor, carrying its label/arity metadata, a type
// guard, and per-field getters under `_` (see Record.makeConstructor).
export interface RecordConstructor<L extends Value<T>, Fs, Names extends Tuple<keyof Fs>, T = GenericEmbedded> {
    (...fields: CtorTypes<Fs, Names>): Record<L, CtorTypes<Fs, Names>, T>;
    constructorInfo: RecordConstructorInfo<L, T>;
    isClassOf(v: any): v is Record<L, CtorTypes<Fs, Names>, T>;
    _: RecordGetters<Fs, Record<L, CtorTypes<Fs, Names>, T>>;
};

// Identity of a record "class": its label plus its field count.
export interface RecordConstructorInfo<L extends Value<T>, T = GenericEmbedded> {
    label: L;
    arity: number;
}

// Infers the embedded type T of the resulting Record from the label and
// fields; unification failures surface as self-describing "TYPE_ERROR_…"
// branded types rather than silently collapsing to any.
export type InferredRecordType<L, FieldsType extends Tuple<any>> =
    L extends symbol ? (FieldsType extends Tuple<Value<infer T>>
        ? (Exclude<T, never> extends symbol ? Record<L, FieldsType, never> : Record<L, FieldsType, T>)
        : (FieldsType extends Tuple<Value<never>>
            ? Record<L, FieldsType, never>
            : "TYPE_ERROR_cannotInferFieldsType" & [never])) :
    L extends Value<infer T> ? (FieldsType extends Tuple<Value<T>>
        ? Record<L, FieldsType, T>
        : "TYPE_ERROR_cannotMatchFieldsTypeToLabelType" & [never]) :
    "TYPE_ERROR_cannotInferEmbeddedType" & [never];
|
||||
|
||||
export function Record<L, FieldsType extends Tuple<any>>(
|
||||
label: L,
|
||||
fields: FieldsType): InferredRecordType<L, FieldsType>
|
||||
{
|
||||
(fields as any).label = label;
|
||||
return fields as any;
|
||||
}
|
||||
|
||||
// Companion namespace for the Record() function: runtime predicates,
// metadata extraction, and the typed-constructor factory.
export namespace Record {
    // A Record is any array carrying a `label` property.
    export function isRecord<L extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>(x: any): x is Record<L, FieldsType, T> {
        return Array.isArray(x) && 'label' in x;
    }

    // Placeholder text used when a field's asPreservesText() throws
    // (see Array.prototype.asPreservesText).
    export function fallbackToString (_f: Value<any>): string {
        return '<unprintable_preserves_field_value>';
    }

    // Derives label/arity metadata from a concrete record instance.
    export function constructorInfo<L extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>(
        r: Record<L, FieldsType, T>): RecordConstructorInfo<L, T>
    {
        return { label: r.label, arity: r.length };
    }

    // True when `v` is a record whose label (by Preserves equality `is`)
    // and arity match the given constructor info.
    export function isClassOf<L extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>(
        ci: RecordConstructorInfo<L, T>, v: any): v is Record<L, FieldsType, T>
    {
        return (Record.isRecord(v)) && is(ci.label, v.label) && (ci.arity === v.length);
    }

    // Curried factory: makeConstructor<Fields>()(label, fieldNames) yields a
    // RecordConstructor. The extra () lets callers fix the field types while
    // letting L and Names be inferred from the arguments.
    export function makeConstructor<Fs, T = GenericEmbedded>()
        : (<L extends Value<T>, Names extends Tuple<keyof Fs>>(label: L, fieldNames: Names) =>
            RecordConstructor<L, Fs, Names, T>)
    {
        return <L extends Value<T>, Names extends Tuple<keyof Fs>>(label: L, fieldNames: Names) => {
            const ctor: RecordConstructor<L, Fs, Names, T> =
                ((...fields: CtorTypes<Fs, Names>) =>
                    Record(label, fields)) as unknown as RecordConstructor<L, Fs, Names, T>;
            const constructorInfo = { label, arity: fieldNames.length };
            ctor.constructorInfo = constructorInfo;
            ctor.isClassOf = (v: any): v is Record<L, CtorTypes<Fs, Names>, T> => Record.isClassOf<L, CtorTypes<Fs, Names>, T>(constructorInfo, v);
            (ctor as any)._ = {};
            // Positional getters, exposed by field name: ctor._.name(record).
            fieldNames.forEach((name, i) => (ctor._ as any)[name] = (r: Record<L, CtorTypes<Fs, Names>, T>) => r[i]);
            return ctor;
        };
    }
}
|
||||
|
||||
Array.prototype.asPreservesText = function (): string {
|
||||
if ('label' in (this as any)) {
|
||||
const r = this as Record<Value, Tuple<Value>, GenericEmbedded>;
|
||||
return '<' + r.label.asPreservesText() + (r.length > 0 ? ' ': '') +
|
||||
r.map(f => {
|
||||
try {
|
||||
return f.asPreservesText();
|
||||
} catch (e) {
|
||||
return Record.fallbackToString(f);
|
||||
}
|
||||
}).join(' ') + '>';
|
||||
} else {
|
||||
return '[' + this.map(i => i.asPreservesText()).join(', ') + ']';
|
||||
}
|
||||
};
|
|
@ -1,21 +0,0 @@
|
|||
export * from './annotated';
|
||||
export * from './bytes';
|
||||
export * from './codec';
|
||||
export * from './compound';
|
||||
export * from './decoder';
|
||||
export * from './dictionary';
|
||||
export * from './embedded';
|
||||
export * from './embeddedTypes';
|
||||
export * from './encoder';
|
||||
export * from './flex';
|
||||
export * from './float';
|
||||
export * from './fold';
|
||||
export * from './fromjs';
|
||||
export * from './is';
|
||||
export * from './merge';
|
||||
export * from './reader';
|
||||
export * from './record';
|
||||
export * from './strip';
|
||||
export * from './symbols';
|
||||
export * from './text';
|
||||
export * from './values';
|
|
@ -1,43 +0,0 @@
|
|||
import { Value } from "./values";
|
||||
import { Annotated } from "./annotated";
|
||||
import { Record, Tuple } from "./record";
|
||||
import { Set, Dictionary } from "./dictionary";
|
||||
import type { GenericEmbedded } from "./embedded";
|
||||
|
||||
export function unannotate<T = GenericEmbedded>(v: Value<T>): Value<T> {
|
||||
return Annotated.isAnnotated<T>(v) ? v.item : v;
|
||||
}
|
||||
|
||||
// Removes exactly one level of annotations (strip with depth 1), rebuilding
// compound children as needed — see strip() below.
export function peel<T = GenericEmbedded>(v: Value<T>): Value<T> {
    return strip(v, 1);
}
|
||||
|
||||
/**
 * Removes annotations from `v` down to `depth` levels (default: all of
 * them), rebuilding compound values (records, sequences, sets,
 * dictionaries) so that their children are stripped one level shallower.
 * Directly nested Annotated-inside-Annotated is rejected as malformed.
 */
export function strip<T = GenericEmbedded>(
    v: Value<T>,
    depth: number = Infinity): Value<T>
{
    function step(v: Value<T>, depth: number): Value<T> {
        // Budget exhausted, or nothing to strip at this node: return as-is.
        if (depth === 0) return v;
        if (!Annotated.isAnnotated<T>(v)) return v;

        const nextDepth = depth - 1;
        function walk(v: Value<T>): Value<T> { return step(v, nextDepth); }

        if (Record.isRecord<Value<T>, Tuple<Value<T>>, T>(v.item)) {
            // NOTE: the label is stripped with the *current* depth, while the
            // fields use nextDepth (via walk) — preserving the original
            // asymmetry; confirm this is intended if refactoring.
            return Record(step(v.item.label, depth), v.item.map(walk));
        } else if (Annotated.isAnnotated(v.item)) {
            throw new Error("Improper annotation structure");
        } else if (nextDepth === 0) {
            // No budget left for children; drop only this wrapper.
            return v.item;
        } else if (Array.isArray(v.item)) {
            return (v.item as Value<T>[]).map(walk);
        } else if (Set.isSet<T>(v.item)) {
            return v.item.map(walk);
        } else if (Dictionary.isDictionary<T>(v.item)) {
            return v.item.mapEntries((e) => [walk(e[0]), walk(e[1])]);
        } else {
            // Atom or embedded: nothing below to strip.
            return v.item;
        }
    }
    return step(v, depth);
}
|
|
@ -1,52 +0,0 @@
|
|||
import type { Value } from './values';
|
||||
|
||||
export function stringify(x: any): string {
|
||||
if (typeof x?.asPreservesText === 'function') {
|
||||
return x.asPreservesText();
|
||||
} else {
|
||||
try {
|
||||
return JSON.stringify(x);
|
||||
} catch (_e) {
|
||||
return ('' + x).asPreservesText();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function preserves<T>(pieces: TemplateStringsArray, ...values: Value<T>[]): string {
|
||||
const result = [pieces[0]];
|
||||
values.forEach((v, i) => {
|
||||
result.push(stringify(v));
|
||||
result.push(pieces[i + 1]);
|
||||
});
|
||||
return result.join('');
|
||||
}
|
||||
|
||||
|
||||
declare global {
|
||||
interface Object { asPreservesText(): string; }
|
||||
}
|
||||
|
||||
Object.defineProperty(Object.prototype, 'asPreservesText', {
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
value: function(): string {
|
||||
return JSON.stringify(this);
|
||||
}
|
||||
});
|
||||
|
||||
Boolean.prototype.asPreservesText = function (): string {
|
||||
return this ? '#t' : '#f';
|
||||
};
|
||||
|
||||
Number.prototype.asPreservesText = function (): string {
|
||||
return '' + this;
|
||||
};
|
||||
|
||||
String.prototype.asPreservesText = function (): string {
|
||||
return JSON.stringify(this);
|
||||
};
|
||||
|
||||
Symbol.prototype.asPreservesText = function (): string {
|
||||
// TODO: escaping
|
||||
return this.description ?? '||';
|
||||
};
|
|
@ -1,31 +0,0 @@
|
|||
// Preserves Values.
|
||||
|
||||
import type { Bytes } from './bytes';
|
||||
import type { DoubleFloat, SingleFloat } from './float';
|
||||
import type { Annotated } from './annotated';
|
||||
import type { Set, Dictionary } from './dictionary';
|
||||
import type { Embedded, GenericEmbedded } from './embedded';
|
||||
|
||||
// The complete Preserves data model: a Value is an atom, a compound
// container, an embedded (domain-specific) value of type T, or any of
// those wrapped with annotations.
export type Value<T = GenericEmbedded> =
    | Atom
    | Compound<T>
    | Embedded<T>
    | Annotated<T>;
// Leaf values. Plain `number` covers integers; SingleFloat/DoubleFloat
// carry explicit IEEE width.
export type Atom =
    | boolean
    | SingleFloat
    | DoubleFloat
    | number
    | string
    | Bytes
    | symbol;
// Containers of Values, parameterised by the embedded type T.
export type Compound<T = GenericEmbedded> =
    | (Array<Value<T>> | [Value<T>]) & { label: Value<T> }
    // ^ expanded from definition of Record<> in record.ts,
    // because if we use Record<Value<T>, Tuple<Value<T>>, T>,
    // TypeScript currently complains about circular use of Value<T>,
    // and if we use Record<any, any, T>, it accepts it but collapses
    // Value<T> to any.
    | Array<Value<T>>
    | Set<T>
    | Dictionary<T>;
|
|
@ -1,335 +0,0 @@
|
|||
import {
|
||||
Value,
|
||||
Dictionary,
|
||||
decode, decodeWithAnnotations, encode, encodeWithAnnotations, canonicalEncode,
|
||||
DecodeError, ShortPacket,
|
||||
Bytes, Record,
|
||||
annotate,
|
||||
strip, peel,
|
||||
preserves,
|
||||
fromJS,
|
||||
Constants,
|
||||
Encoder,
|
||||
GenericEmbedded,
|
||||
EncoderState,
|
||||
EmbeddedType,
|
||||
DecoderState,
|
||||
Decoder,
|
||||
Embedded,
|
||||
embed,
|
||||
genericEmbeddedTypeDecode,
|
||||
genericEmbeddedTypeEncode,
|
||||
} from '../src/index';
|
||||
const { Tag } = Constants;
|
||||
import './test-utils';
|
||||
|
||||
import * as fs from 'fs';
|
||||
|
||||
// Record labels and typed constructors for the three pattern shapes used
// throughout this test file: <discard>, <capture pat>, <observe pat>.
const _discard = Symbol.for('discard');
const _capture = Symbol.for('capture');
const _observe = Symbol.for('observe');
const Discard = Record.makeConstructor<{}, GenericEmbedded>()(_discard, []);
const Capture = Record.makeConstructor<{pattern: Value<GenericEmbedded>}, GenericEmbedded>()(_capture, ['pattern']);
const Observe = Record.makeConstructor<{pattern: Value<GenericEmbedded>}, GenericEmbedded>()(_observe, ['pattern']);
|
||||
|
||||
// Constructors built by Record.makeConstructor carry their label and arity
// as metadata; check each of the three test constructors.
describe('record constructors', () => {
    it('should have constructorInfo', () => {
        expect(Discard.constructorInfo.label).toEqual(Symbol.for('discard'));
        expect(Capture.constructorInfo.label).toEqual(Symbol.for('capture'));
        expect(Observe.constructorInfo.label).toEqual(Symbol.for('observe'));
        expect(Discard.constructorInfo.arity).toEqual(0);
        expect(Capture.constructorInfo.arity).toEqual(1);
        expect(Observe.constructorInfo.arity).toEqual(1);
    });
})
|
||||
|
||||
// Two constructors with the same label ([1]) and arity but different field
// names are the same record "class" structurally, though their
// constructorInfo objects are distinct by reference.
describe('RecordConstructorInfo', () => {
    const C1 = Record.makeConstructor<{x: number, y: number}>()([1], ['x', 'y']);
    const C2 = Record.makeConstructor<{z: number, w: number}>()([1], ['z', 'w']);
    it('instance comparison should ignore embedded and fieldname differences', () => {
        expect(C1(9,9)).is(C2(9,9));
        expect(C1(9,9)).not.is(C2(9,8));
    });
    it('comparison based on embedded equality should not work', () => {
        // Reference equality: each makeConstructor call allocates fresh info.
        expect(C1.constructorInfo).not.toBe(C2.constructorInfo);
    });
    it('comparison based on .equals should work', () => {
        expect(C1.constructorInfo).toEqual(C2.constructorInfo);
    });
});
|
||||
|
||||
// Record.constructorInfo derived from an instance must match the metadata
// stored on the constructor that built it.
describe('records', () => {
    it('should have correct getConstructorInfo', () => {
        expect(Record.constructorInfo(Discard())).toEqual(Discard.constructorInfo);
        expect(Record.constructorInfo(Capture(Discard()))).toEqual(Capture.constructorInfo);
        expect(Record.constructorInfo(Observe(Capture(Discard())))).toEqual(Observe.constructorInfo);
    });
});
|
||||
|
||||
// Regression test: decoding from a Uint8Array subarray (non-zero byteOffset)
// must not misalign reads. 0xb1 0x03 '3' '3' '3' is the encoding of the
// string "333", preceded here by four padding bytes.
describe('parsing from subarray', () => {
    it('should maintain alignment of nextbytes', () => {
        const u = Uint8Array.of(1, 1, 1, 1, 0xb1, 0x03, 0x33, 0x33, 0x33);
        const bs = Bytes.from(u.subarray(4));
        expect(decode(bs)).is("333");
    });
});
|
||||
|
||||
// Regression test: the encoder's internal buffer reuse must not corrupt
// output when a nested dictionary is encoded mid-sequence. Expected hex is
// written with whitespace for readability and squashed before comparison.
describe('reusing buffer space', () => {
    it('should be done safely, even with nested dictionaries', () => {
        expect(canonicalEncode(fromJS(['aaa', Dictionary.fromJS({a: 1}), 'zzz'])).toHex()).is(
            `b5
             b103616161
             b7
             b10161 91
             84
             b1037a7a7a
             84`.replace(/\s+/g, ''));
    });
});
|
||||
|
||||
// Embedded (#!) values are serialised through an EmbeddedType codec. The
// LookasideEmbeddedType below encodes an object as its index in a shared
// side table, so identity (not structure) determines equality on the wire.
describe('encoding and decoding embeddeds', () => {
    class LookasideEmbeddedType implements EmbeddedType<object> {
        readonly objects: object[];   // side table; index == wire representation

        constructor(objects: object[]) {
            this.objects = objects;
        }

        decode(d: DecoderState): object {
            return this.fromValue(new Decoder<GenericEmbedded>(d).next());
        }

        encode(e: EncoderState, v: object): void {
            new Encoder(e).push(this.toValue(v));
        }

        // Identity-based equality, matching the index-based encoding.
        equals(a: object, b: object): boolean {
            return Object.is(a, b);
        }

        // Wire value must be a valid index into the side table.
        fromValue(v: Value<GenericEmbedded>): object {
            if (typeof v !== 'number' || v < 0 || v >= this.objects.length) {
                throw new Error("Unknown embedded target");
            }
            return this.objects[v];
        }

        // Interns `v`, appending it on first sight.
        toValue(v: object): number {
            let i = this.objects.indexOf(v);
            if (i !== -1) return i;
            this.objects.push(v);
            return this.objects.length - 1;
        }
    }

    it('should encode using embeddedId when no function has been supplied', () => {
        const A1 = embed({a: 1});
        const A2 = embed({a: 1});
        const bs1 = canonicalEncode(A1);
        const bs2 = canonicalEncode(A2);
        const bs3 = canonicalEncode(A1);
        expect(bs1.get(0)).toBe(Tag.Embedded);
        expect(bs2.get(0)).toBe(Tag.Embedded);
        expect(bs3.get(0)).toBe(Tag.Embedded);
        // Can't really check the value assigned to the object. But we
        // can check that it's different to a similar object!
        expect(bs1).not.is(bs2);
        expect(bs1).is(bs3);
    });
    it('should refuse to decode embeddeds when no function has been supplied', () => {
        expect(() => decode(Bytes.from([Tag.Embedded, Tag.SmallInteger_lo])))
            .toThrow("Embeddeds not permitted at this point in Preserves document");
    });
    it('should encode properly', () => {
        const objects: object[] = [];
        const pt = new LookasideEmbeddedType(objects);
        const A = embed({a: 1});
        const B = embed({b: 2});
        // First-seen objects intern to indices 0 and 1 respectively.
        expect(encode([A, B], { embeddedEncode: pt })).is(
            Bytes.from([Tag.Sequence,
                        Tag.Embedded, Tag.SmallInteger_lo,
                        Tag.Embedded, Tag.SmallInteger_lo + 1,
                        Tag.End]));
        expect(objects).toEqual([A.embeddedValue, B.embeddedValue]);
    });
    it('should decode properly', () => {
        const objects: object[] = [];
        const pt = new LookasideEmbeddedType(objects);
        const X: Embedded<object> = embed({x: 123});
        const Y: Embedded<object> = embed({y: 456});
        // Pre-populate the side table so indices 0/1 resolve to X/Y.
        objects.push(X.embeddedValue);
        objects.push(Y.embeddedValue);
        expect(decode(Bytes.from([
            Tag.Sequence,
            Tag.Embedded, Tag.SmallInteger_lo,
            Tag.Embedded, Tag.SmallInteger_lo + 1,
            Tag.End
        ]), { embeddedDecode: pt })).is([X, Y]);
    });
    it('should store embeddeds embedded in map keys correctly', () => {
        const A1a = {a: 1};
        const A1: Embedded<object> = embed(A1a);
        const A2: Embedded<object> = embed({a: 1});
        const m = new Dictionary<object, number>();
        m.set([A1], 1);
        m.set([A2], 2);
        // The key was [A1], not bare A1.
        expect(m.get(A1)).toBeUndefined();
        expect(m.get([A1])).toBe(1);
        expect(m.get([A2])).toBe(2);
        // A structurally-equal but distinct embedded object is a different key.
        expect(m.get([embed({a: 1})])).toBeUndefined();
        // Mutating the embedded object must not change its key hash/identity.
        A1a.a = 3;
        expect(m.get([A1])).toBe(1);
    });
});
|
||||
|
||||
// Drives the cross-implementation Preserves test corpus (tests/samples.bin):
// a <TestCases {name: <Kind binary annotated-value>}> record whose entries are
// expanded into jest suites according to their Kind label.
describe('common test suite', () => {
    const samples_bin = fs.readFileSync(__dirname + '/../../../../../tests/samples.bin');
    const samples = decodeWithAnnotations(samples_bin, { embeddedDecode: genericEmbeddedTypeDecode });

    const TestCases = Record.makeConstructor<{
        cases: Dictionary<GenericEmbedded>
    }>()(Symbol.for('TestCases'), ['cases']);
    type TestCases = ReturnType<typeof TestCases>;

    // DS: decode, annotations stripped. D: decode keeping annotations.
    // E: encode keeping annotations.
    function DS(bs: Bytes) {
        return decode(bs, { embeddedDecode: genericEmbeddedTypeDecode });
    }
    function D(bs: Bytes) {
        return decodeWithAnnotations(bs, { embeddedDecode: genericEmbeddedTypeDecode });
    }
    function E(v: Value<GenericEmbedded>) {
        return encodeWithAnnotations(v, { embeddedEncode: genericEmbeddedTypeEncode });
    }

    // Per-test expectations: either a single `value` (same before and after a
    // round-trip) or a `forward` input with its annotation-stripped `back` form.
    interface ExpectedValues {
        [testName: string]: ({
            value: Value<GenericEmbedded>;
        } | {
            forward: Value<GenericEmbedded>;
            back: Value<GenericEmbedded>;
        });
    }

    // Overrides for cases whose expected value cannot be taken verbatim from
    // the samples file (mostly annotation-carrying cases). All other test
    // names default to the annotated text form from the corpus itself.
    const expectedValues: ExpectedValues = {
        annotation1: { forward: annotate<GenericEmbedded>(9, "abc"),
                       back: 9 },
        annotation2: { forward: annotate<GenericEmbedded>([[], annotate<GenericEmbedded>([], "x")],
                                                          "abc",
                                                          "def"),
                       back: [[], []] },
        annotation3: { forward: annotate<GenericEmbedded>(5,
                                                          annotate<GenericEmbedded>(2, 1),
                                                          annotate<GenericEmbedded>(4, 3)),
                       back: 5 },
        annotation5: {
            forward: annotate<GenericEmbedded>(
                Record<symbol, any>(Symbol.for('R'),
                                    [annotate<GenericEmbedded>(Symbol.for('f'),
                                                               Symbol.for('af'))]),
                Symbol.for('ar')),
            back: Record<Value<GenericEmbedded>, any>(Symbol.for('R'), [Symbol.for('f')])
        },
        annotation6: {
            forward: Record<Value<GenericEmbedded>, any>(
                annotate<GenericEmbedded>(Symbol.for('R'),
                                          Symbol.for('ar')),
                [annotate<GenericEmbedded>(Symbol.for('f'),
                                           Symbol.for('af'))]),
            back: Record<symbol, any>(Symbol.for('R'), [Symbol.for('f')])
        },
        annotation7: {
            forward: annotate<GenericEmbedded>([], Symbol.for('a'), Symbol.for('b'), Symbol.for('c')),
            back: []
        },
        list1: {
            forward: [1, 2, 3, 4],
            back: [1, 2, 3, 4]
        },
        record2: {
            value: Observe(Record(Symbol.for("speak"), [
                Discard(),
                Capture(Discard())
            ]))
        },
    };

    // 'normal' tests also assert byte-exact encoding; 'nondeterministic' and
    // 'decode' tests only assert decoding behavior.
    type Variety = 'normal' | 'nondeterministic' | 'decode';

    function runTestCase(variety: Variety,
                         tName: string,
                         binaryForm: Bytes,
                         annotatedTextForm: Value<GenericEmbedded>)
    {
        describe(tName, () => {
            const textForm = strip(annotatedTextForm);
            const {forward, back} = (function () {
                const entry = expectedValues[tName] ?? {value: textForm};
                if ('value' in entry) {
                    return {forward: entry.value, back: entry.value};
                } else if ('forward' in entry && 'back' in entry) {
                    return entry;
                } else {
                    throw new Error('Invalid expectedValues entry for ' + tName);
                }
            })();
            it('should match the expected value', () => expect(textForm).is(back));
            it('should round-trip', () => expect(DS(E(textForm))).is(back));
            it('should go forward', () => expect(DS(E(forward))).is(back));
            it('should go back', () => expect(DS(binaryForm)).is(back));
            it('should go back with annotations',
               () => expect(D(E(annotatedTextForm))).is(annotatedTextForm));
            if (variety !== 'decode' && variety !== 'nondeterministic') {
                it('should encode correctly', () => expect(E(forward)).is(binaryForm));
                it('should encode correctly with annotations',
                   () => expect(E(annotatedTextForm)).is(binaryForm));
            }
        });
    }

    // Expand each corpus entry into suites, dispatching on its record label.
    const tests = (peel(TestCases._.cases(peel(samples) as TestCases)) as
        Dictionary<GenericEmbedded>);
    tests.forEach((t0: Value<GenericEmbedded>, tName0: Value<GenericEmbedded>) => {
        const tName = Symbol.keyFor(strip(tName0) as symbol)!;
        const t = peel(t0) as Record<symbol, any, GenericEmbedded>;
        switch (t.label) {
            case Symbol.for('Test'):
                runTestCase('normal', tName, strip(t[0]) as Bytes, t[1]);
                break;
            case Symbol.for('NondeterministicTest'):
                runTestCase('nondeterministic', tName, strip(t[0]) as Bytes, t[1]);
                break;
            case Symbol.for('DecodeTest'):
                runTestCase('decode', tName, strip(t[0]) as Bytes, t[1]);
                break;
            case Symbol.for('DecodeError'):
                describe(tName, () => {
                    it('should fail with DecodeError', () => {
                        // Must be a genuine decode error, not mere truncation.
                        expect(() => D(strip(t[0]) as Bytes))
                            .toThrowFilter(e =>
                                DecodeError.isDecodeError(e) &&
                                !ShortPacket.isShortPacket(e));
                    });
                });
                break;
            case Symbol.for('DecodeEOF'): // fall through
            case Symbol.for('DecodeShort'):
                describe(tName, () => {
                    it('should fail with ShortPacket', () => {
                        expect(() => D(strip(t[0]) as Bytes))
                            .toThrowFilter(e => ShortPacket.isShortPacket(e));
                    });
                });
                break;
            case Symbol.for('ParseError'):
            case Symbol.for('ParseEOF'):
            case Symbol.for('ParseShort'):
                /* Skipped for now, until we have an implementation of text syntax */
                break;
            default:{
                const e = new Error(preserves`Unsupported test kind ${t}`);
                console.error(e);
                throw e;
            }
        }
    });
});
|
|
@ -1,31 +0,0 @@
|
|||
import { Bytes, Decoder, genericEmbeddedType, encode, Reader } from '../src/index';
|
||||
import './test-utils';
|
||||
|
||||
import * as fs from 'fs';
|
||||
|
||||
// Cross-checks the text Reader against the binary Decoder on the shared
// corpus: both syntaxes of the same samples must produce equal values, and
// the parsed text must re-encode byte-for-byte to the canonical binary.
// NOTE(review): this file imports `genericEmbeddedType`, while the core
// index re-exports `genericEmbeddedTypeDecode`/`genericEmbeddedTypeEncode`
// elsewhere in this tree — confirm this identifier still exists.
describe('reading common test suite', () => {
    const samples_bin = fs.readFileSync(__dirname + '/../../../../../tests/samples.bin');
    const samples_pr = fs.readFileSync(__dirname + '/../../../../../tests/samples.pr', 'utf-8');

    it('should read equal to decoded binary without annotations', () => {
        const s1 = new Reader(samples_pr, { embeddedDecode: genericEmbeddedType, includeAnnotations: false }).next();
        const s2 = new Decoder(samples_bin, { embeddedDecode: genericEmbeddedType, includeAnnotations: false }).next();
        expect(s1).is(s2);
    });

    it('should read equal to decoded binary with annotations', () => {
        const s1 = new Reader(samples_pr, { embeddedDecode: genericEmbeddedType, includeAnnotations: true }).next();
        const s2 = new Decoder(samples_bin, { embeddedDecode: genericEmbeddedType, includeAnnotations: true }).next();
        expect(s1).is(s2);
    });

    it('should read and encode back to binary with annotations', () => {
        const s = new Reader(samples_pr, { embeddedDecode: genericEmbeddedType, includeAnnotations: true }).next();
        // Canonical encoding must reproduce samples.bin exactly.
        const bs = Bytes.toIO(encode(s, {
            embeddedEncode: genericEmbeddedType,
            includeAnnotations: true,
            canonical: true,
        }));
        expect(bs).toEqual(new Uint8Array(samples_bin));
    });
});
|
|
@ -1,2 +0,0 @@
|
|||
dist/
|
||||
lib/
|
|
@ -1 +0,0 @@
|
|||
version-tag-prefix javascript-@preserves/schema@
|
|
@ -1,2 +0,0 @@
|
|||
#!/usr/bin/env node
// CLI shim: delegates to the bundled preserves-schema-ts entry point,
// forwarding all command-line arguments.
require('../dist/bin/preserves-schema-ts.js').main(process.argv.slice(2));
|
|
@ -1,2 +0,0 @@
|
|||
#!/usr/bin/env node
// CLI shim: delegates to the bundled preserves-schemac entry point,
// forwarding all command-line arguments.
require('../dist/bin/preserves-schemac.js').main(process.argv.slice(2));
|
|
@ -1,4 +0,0 @@
|
|||
// Jest configuration: compile TypeScript tests with ts-jest and run them
// under plain Node (no browser/jsdom environment).
export default {
    preset: 'ts-jest',
    testEnvironment: 'node',
};
|
|
@ -1,39 +0,0 @@
|
|||
{
|
||||
"name": "@preserves/schema",
|
||||
"version": "0.17.0",
|
||||
"description": "Schema support for Preserves data serialization format",
|
||||
"homepage": "https://gitlab.com/preserves/preserves",
|
||||
"license": "Apache-2.0",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": "gitlab:preserves/preserves",
|
||||
"main": "dist/preserves-schema.js",
|
||||
"module": "lib/index.js",
|
||||
"types": "lib/index.d.ts",
|
||||
"author": "Tony Garnock-Jones <tonyg@leastfixedpoint.com>",
|
||||
"scripts": {
|
||||
"regenerate": "rm -rf ./src/gen && ./bin/preserves-schema-ts.js --output ./src/gen ../../../../schema/schema.prs",
|
||||
"clean": "rm -rf lib dist",
|
||||
"prepare": "tsc && rollup -c",
|
||||
"rollupwatch": "rollup -c -w",
|
||||
"test": "jest",
|
||||
"testwatch": "jest --watch",
|
||||
"veryclean": "yarn run clean && rm -rf node_modules",
|
||||
"watch": "tsc -w"
|
||||
},
|
||||
"bin": {
|
||||
"preserves-schema-ts": "./bin/preserves-schema-ts.js",
|
||||
"preserves-schemac": "./bin/preserves-schemac.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@preserves/core": "^0.17.0",
|
||||
"@types/glob": "^7.1.3",
|
||||
"@types/minimatch": "^3.0.3",
|
||||
"chalk": "^4.1.0",
|
||||
"chokidar": "^3.5.1",
|
||||
"commander": "^7.2.0",
|
||||
"glob": "^7.1.6",
|
||||
"minimatch": "^3.0.4"
|
||||
}
|
||||
}
|
|
@ -1,58 +0,0 @@
|
|||
import { terser } from 'rollup-plugin-terser';
|
||||
|
||||
// Path of a generated bundle inside dist/, e.g. distfile('.min') => 'dist/preserves-schema.min.js'.
const distfile = (insertion) => ['dist/preserves-schema', insertion, '.js'].join('');
|
||||
|
||||
// Rollup output descriptor for a UMD bundle; `extra` entries (e.g. plugins)
// are merged in and may override the defaults.
function umd(insertion, extra) {
    const output = {
        file: distfile(insertion),
        format: 'umd',
        name: 'PreservesSchema',
        globals: {
            '@preserves/core': 'Preserves',
        },
    };
    return Object.assign(output, extra || {});
}
|
||||
|
||||
// Rollup output descriptor for an ES-module bundle (file name gains a
// '.es6' infix); `extra` entries are merged in and may override defaults.
function es6(insertion, extra) {
    const output = {
        file: distfile('.es6' + insertion),
        format: 'es',
        globals: {
            '@preserves/core': 'Preserves',
        },
    };
    return Object.assign(output, extra || {});
}
|
||||
|
||||
// Rollup config for one CommonJS CLI executable: bundles lib/bin/<name>.js
// into dist/bin/<name>.js, leaving runtime dependencies external.
function cli(name) {
    const externals = [
        '@preserves/core',
        'chalk',
        'chokidar',
        'fs',
        'glob',
        'minimatch',
        'path',
        'commander',
    ];
    return {
        input: `lib/bin/${name}.js`,
        output: [{ file: `dist/bin/${name}.js`, format: 'commonjs' }],
        external: externals,
    };
}
|
||||
|
||||
// Build matrix: the library is emitted as UMD and ES-module bundles, each
// in plain and terser-minified form, plus one bundled config per CLI tool.
export default [
    {
        input: 'lib/index.js',
        output: [
            umd(''),
            umd('.min', { plugins: [terser()] }),
            es6(''),
            es6('.min', { plugins: [terser()] }),
        ],
        external: ['@preserves/core'],
    },
    cli('preserves-schema-ts'),
    cli('preserves-schemac'),
];
|
|
@ -1,99 +0,0 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { glob } from 'glob';
|
||||
import { formatPosition, Position } from '@preserves/core';
|
||||
import { readSchema } from '../reader';
|
||||
import chalk from 'chalk';
|
||||
import * as M from '../meta';
|
||||
|
||||
// One warning or error produced while expanding/reading schema inputs;
// `file` is null for diagnostics not tied to a particular input file.
export interface Diagnostic {
    type: 'warn' | 'error';
    file: string | null;
    detail: Error | { message: string, pos: Position | null };
};

// Result of expandInputGlob: the common base directory, the successfully
// parsed schema files (with their module paths relative to `base`), and any
// per-file failures collected along the way.
export type Expanded = {
    base: string,
    inputFiles: Array<{
        inputFilePath: string,
        text: string,
        baseRelPath: string,
        modulePath: M.ModulePath,
        schema: M.Schema,
    }>,
    failures: Array<Diagnostic>,
};
|
||||
|
||||
export function computeBase(paths: string[]): string {
|
||||
if (paths.length === 0) {
|
||||
return '';
|
||||
} else if (paths.length === 1) {
|
||||
const d = path.dirname(paths[0]);
|
||||
return (d === '.') ? '' : d + '/';
|
||||
} else {
|
||||
let i = 0;
|
||||
while (true) {
|
||||
let ch: string | null = null
|
||||
for (const p of paths) {
|
||||
if (i >= p.length) return p.slice(0, i);
|
||||
if (ch === null) ch = p[i];
|
||||
if (p[i] !== ch) return p.slice(0, i);
|
||||
}
|
||||
i++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function expandInputGlob(input: string[], base0: string | undefined): Expanded {
|
||||
const matches = input.flatMap(i => glob.sync(i));
|
||||
const base = base0 ?? computeBase(matches);
|
||||
const failures: Array<Diagnostic> = [];
|
||||
|
||||
return {
|
||||
base,
|
||||
inputFiles: matches.flatMap(inputFilePath => {
|
||||
if (!inputFilePath.startsWith(base)) {
|
||||
throw new Error(`Input filename ${inputFilePath} falls outside base ${base}`);
|
||||
}
|
||||
try {
|
||||
const text = fs.readFileSync(inputFilePath, 'utf-8');
|
||||
const baseRelPath = inputFilePath.slice(base.length);
|
||||
const modulePath = baseRelPath.split('/').map(p => p.split('.')[0]).map(Symbol.for);
|
||||
const schema = readSchema(text, {
|
||||
name: inputFilePath,
|
||||
readInclude(includePath: string): string {
|
||||
return fs.readFileSync(
|
||||
path.resolve(path.dirname(inputFilePath), includePath),
|
||||
'utf-8');
|
||||
},
|
||||
});
|
||||
return [{ inputFilePath, text, baseRelPath, modulePath, schema }];
|
||||
} catch (e) {
|
||||
failures.push({ type: 'error', file: inputFilePath, detail: e });
|
||||
return [];
|
||||
}
|
||||
}),
|
||||
failures,
|
||||
};
|
||||
}
|
||||
|
||||
export function changeExt(p: string, newext: string): string {
|
||||
return p.slice(0, -path.extname(p).length) + newext;
|
||||
}
|
||||
|
||||
export function formatFailures(failures: Array<Diagnostic>, traceback = false): void {
|
||||
for (const d of failures) {
|
||||
console.error(
|
||||
(d.type === 'error' ? chalk.redBright('[ERROR]') : chalk.yellowBright('[WARNING]'))
|
||||
+ ' '
|
||||
+ chalk.blueBright(formatPosition((d.detail as any).pos ?? d.file))
|
||||
+ ': '
|
||||
+ d.detail.message
|
||||
+ (traceback && (d.detail instanceof Error)
|
||||
? '\n' + d.detail.stack
|
||||
: ''));
|
||||
}
|
||||
if (failures.length > 0) {
|
||||
console.error();
|
||||
}
|
||||
}
|
|
@ -1,178 +0,0 @@
|
|||
import { compile } from '../index';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import minimatch from 'minimatch';
|
||||
import { Command } from 'commander';
|
||||
import * as M from '../meta';
|
||||
import chalk from 'chalk';
|
||||
import { Position } from '@preserves/core';
|
||||
import chokidar from 'chokidar';
|
||||
import { changeExt, Diagnostic, expandInputGlob, formatFailures } from './cli-utils';
|
||||
|
||||
export type CommandLineArguments = {
|
||||
inputs: string[];
|
||||
base: string | undefined;
|
||||
output: string | undefined;
|
||||
stdout: boolean;
|
||||
core: string;
|
||||
watch: boolean;
|
||||
traceback: boolean;
|
||||
module: string[];
|
||||
};
|
||||
|
||||
export type CompilationResult = {
|
||||
options: CommandLineArguments,
|
||||
inputFiles: Array<InputFile>,
|
||||
failures: Array<Diagnostic>,
|
||||
base: string,
|
||||
output: string,
|
||||
};
|
||||
|
||||
export type InputFile = {
|
||||
inputFilePath: string,
|
||||
outputFilePath: string,
|
||||
schemaPath: M.ModulePath,
|
||||
schema: M.Schema,
|
||||
};
|
||||
|
||||
function failureCount(type: 'warn' | 'error', r: CompilationResult): number {
|
||||
return r.failures.filter(f => f.type === type).length;
|
||||
}
|
||||
|
||||
export function run(options: CommandLineArguments): void {
|
||||
if (!options.watch) {
|
||||
if (failureCount('error', runOnce(options)) > 0) {
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
function runWatch() {
|
||||
console.clear();
|
||||
console.log(chalk.gray(new Date().toISOString()) +
|
||||
' Compiling Schemas in watch mode...\n');
|
||||
const r = runOnce(options);
|
||||
const warningCount = failureCount('warn', r);
|
||||
const errorCount = failureCount('error', r);
|
||||
const wMsg = (warningCount > 0) && chalk.yellowBright(`${warningCount} warning(s)`);
|
||||
const eMsg = (errorCount > 0) && chalk.redBright(`${errorCount} error(s)`);
|
||||
const errorSummary =
|
||||
(wMsg && eMsg) ? `with ${eMsg} and ${wMsg}` :
|
||||
(wMsg) ? `with ${wMsg}` :
|
||||
(eMsg) ? `with ${eMsg}` :
|
||||
chalk.greenBright('successfully');
|
||||
console.log(chalk.gray(new Date().toISOString()) +
|
||||
` Processed ${r.inputFiles.length} file(s) ${errorSummary}. Waiting for changes.`);
|
||||
const watcher = chokidar.watch(r.base, {
|
||||
ignoreInitial: true,
|
||||
}).on('all', (_event, filename) => {
|
||||
if (options.inputs.some(i => minimatch(filename, i))) {
|
||||
watcher.close();
|
||||
runWatch();
|
||||
}
|
||||
});
|
||||
}
|
||||
runWatch();
|
||||
}
|
||||
}
|
||||
|
||||
export function modulePathTo(file1: string, file2: string): string {
|
||||
let naive = path.relative(path.dirname(file1), file2);
|
||||
if (naive[0] !== '.' && naive[0] !== '/') naive = './' + naive;
|
||||
return changeExt(naive, '');
|
||||
}
|
||||
|
||||
export function runOnce(options: CommandLineArguments): CompilationResult {
|
||||
const { base, failures, inputFiles: inputFiles0 } =
|
||||
expandInputGlob(options.inputs, options.base);
|
||||
const output = options.output ?? base;
|
||||
|
||||
const extensionEnv: M.Environment = options.module.map(arg => {
|
||||
const i = arg.indexOf('=');
|
||||
if (i === -1) throw new Error(`--module argument must be Namespace=path: ${arg}`);
|
||||
const ns = arg.slice(0, i);
|
||||
const path = arg.slice(i + 1);
|
||||
return {
|
||||
schema: null,
|
||||
schemaModulePath: ns.split('.').map(Symbol.for),
|
||||
typescriptModulePath: path,
|
||||
};
|
||||
});
|
||||
|
||||
const inputFiles: Array<InputFile> = inputFiles0.map(i => {
|
||||
const { inputFilePath, baseRelPath, modulePath, schema } = i;
|
||||
const outputFilePath = path.join(output, changeExt(baseRelPath, '.ts'));
|
||||
return { inputFilePath, outputFilePath, schemaPath: modulePath, schema };
|
||||
});
|
||||
|
||||
inputFiles.forEach(c => {
|
||||
const env: M.Environment = [
|
||||
... extensionEnv.flatMap(e => {
|
||||
const p = modulePathTo(c.outputFilePath, e.typescriptModulePath);
|
||||
if (p === null) return [];
|
||||
return [{... e, typescriptModulePath: p}];
|
||||
}),
|
||||
... inputFiles.map(cc => ({
|
||||
schema: cc.schema,
|
||||
schemaModulePath: cc.schemaPath,
|
||||
typescriptModulePath: modulePathTo(c.outputFilePath, cc.outputFilePath),
|
||||
})),
|
||||
];
|
||||
fs.mkdirSync(path.dirname(c.outputFilePath), { recursive: true });
|
||||
let compiledModule;
|
||||
try {
|
||||
compiledModule = compile(env, c.schemaPath, c.schema, {
|
||||
preservesModule: options.core,
|
||||
warn: (message: string, pos: Position | null) =>
|
||||
failures.push({ type: 'warn', file: c.inputFilePath, detail: { message, pos } }),
|
||||
});
|
||||
} catch (e) {
|
||||
failures.push({ type: 'error', file: c.inputFilePath, detail: e });
|
||||
}
|
||||
if (compiledModule !== void 0) {
|
||||
if (options.stdout) {
|
||||
console.log('////------------------------------------------------------------');
|
||||
console.log('//// ' + c.outputFilePath);
|
||||
console.log();
|
||||
console.log(compiledModule);
|
||||
} else {
|
||||
fs.writeFileSync(c.outputFilePath, compiledModule, 'utf-8');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
formatFailures(failures, options.traceback);
|
||||
|
||||
return { options, inputFiles, failures, base, output };
|
||||
}
|
||||
|
||||
export function main(argv: Array<string>) {
|
||||
new Command()
|
||||
.arguments('[input...]')
|
||||
.description('Compile Preserves schema definitions to TypeScript', {
|
||||
input: 'Input filename or glob',
|
||||
})
|
||||
.option('--output <directory>', 'Output directory for modules (default: next to sources)')
|
||||
.option('--stdout', 'Prints each module to stdout one after the other instead ' +
|
||||
'of writing them to files in the `--output` directory')
|
||||
.option('--base <directory>', 'Base directory for sources (default: common prefix)')
|
||||
.option('--core <path>', 'Import path for @preserves/core', '@preserves/core')
|
||||
.option('--watch', 'Watch base directory for changes')
|
||||
.option('--traceback', 'Include stack traces in compiler errors')
|
||||
.option('--module <namespace=path>', 'Additional Namespace=path import',
|
||||
(nsPath: string, previous: string[]): string[] => [... previous, nsPath],
|
||||
[])
|
||||
.action((inputs: string[], rawOptions) => {
|
||||
const options: CommandLineArguments = {
|
||||
inputs: inputs.map(i => path.normalize(i)),
|
||||
base: rawOptions.base,
|
||||
output: rawOptions.output,
|
||||
stdout: rawOptions.stdout,
|
||||
core: rawOptions.core,
|
||||
watch: rawOptions.watch,
|
||||
traceback: rawOptions.traceback,
|
||||
module: rawOptions.module,
|
||||
};
|
||||
Error.stackTraceLimit = Infinity;
|
||||
run(options);
|
||||
})
|
||||
.parse(argv, { from: 'user' });
|
||||
}
|
|
@ -1,58 +0,0 @@
|
|||
import { Command } from 'commander';
|
||||
import { canonicalEncode, KeyedDictionary, underlying } from '@preserves/core';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import * as M from '../meta';
|
||||
import { expandInputGlob, formatFailures } from './cli-utils';
|
||||
|
||||
export type CommandLineArguments = {
|
||||
inputs: string[];
|
||||
base: string | undefined;
|
||||
bundle: boolean;
|
||||
};
|
||||
|
||||
export function run(options: CommandLineArguments): void {
|
||||
const { failures, inputFiles } = expandInputGlob(options.inputs, options.base);
|
||||
|
||||
if (!options.bundle && inputFiles.length !== 1) {
|
||||
failures.push({ type: 'error', file: null, detail: {
|
||||
message: 'Cannot emit non-bundle with anything other than exactly one input file',
|
||||
pos: null,
|
||||
}});
|
||||
}
|
||||
|
||||
formatFailures(failures);
|
||||
|
||||
if (failures.length === 0) {
|
||||
if (options.bundle) {
|
||||
fs.writeSync(1, underlying(canonicalEncode(M.fromBundle({
|
||||
modules: new KeyedDictionary<M.ModulePath, M.Schema, M.InputEmbedded>(
|
||||
inputFiles.map(i => [i.modulePath, i.schema])),
|
||||
}))));
|
||||
} else {
|
||||
fs.writeSync(1, underlying(canonicalEncode(M.fromSchema(inputFiles[0].schema))));
|
||||
}
|
||||
} else {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
export function main(argv: Array<string>) {
|
||||
new Command()
|
||||
.arguments('[input...]')
|
||||
.description('Compile textual Preserves schema definitions to binary format', {
|
||||
input: 'Input filename or glob',
|
||||
})
|
||||
.option('--no-bundle', 'Emit a single Schema instead of a schema Bundle')
|
||||
.option('--base <directory>', 'Base directory for sources (default: common prefix)')
|
||||
.action((inputs: string[], rawOptions) => {
|
||||
const options: CommandLineArguments = {
|
||||
inputs: inputs.map(i => path.normalize(i)),
|
||||
base: rawOptions.base,
|
||||
bundle: rawOptions.bundle,
|
||||
};
|
||||
Error.stackTraceLimit = Infinity;
|
||||
run(options);
|
||||
})
|
||||
.parse(argv, { from: 'user' });
|
||||
}
|
|
@ -1,130 +0,0 @@
|
|||
import * as M from './meta';
|
||||
|
||||
export function checkSchema(schema: M.Schema): (
|
||||
{ ok: true, schema: M.Schema } | { ok: false, problems: Array<string> })
|
||||
{
|
||||
const checker = new Checker();
|
||||
schema.definitions.forEach(checker.checkDefinition.bind(checker));
|
||||
if (checker.problems.length > 0) {
|
||||
return { ok: false, problems: checker.problems };
|
||||
} else {
|
||||
return { ok: true, schema };
|
||||
}
|
||||
}
|
||||
|
||||
enum ValueAvailability {
|
||||
AVAILABLE,
|
||||
NOT_AVAILABLE,
|
||||
};
|
||||
|
||||
class Checker {
|
||||
problems: Array<string> = [];
|
||||
|
||||
recordProblem(context: string, detail: string): void {
|
||||
this.problems.push(`${detail} in ${context}`);
|
||||
}
|
||||
|
||||
checkBinding(scope: Set<string>, sym: symbol, context: string): void {
|
||||
const name = sym.description!;
|
||||
if (scope.has(name)) {
|
||||
this.recordProblem(context, `duplicate binding named ${JSON.stringify(name)}`);
|
||||
}
|
||||
if (!M.isValidToken(name)) {
|
||||
this.recordProblem(context, `invalid binding name ${JSON.stringify(name)}`);
|
||||
}
|
||||
scope.add(name);
|
||||
}
|
||||
|
||||
checkDefinition(def: M.Definition, name: symbol): void {
|
||||
switch (def._variant) {
|
||||
case 'or': {
|
||||
const labels = new Set<string>();
|
||||
[def.pattern0, def.pattern1, ... def.patternN].forEach(({ variantLabel, pattern }) => {
|
||||
const context = `variant ${variantLabel} of ${name.description!}`;
|
||||
if (labels.has(variantLabel)) {
|
||||
this.recordProblem(context, `duplicate variant label`);
|
||||
}
|
||||
if (!M.isValidToken(variantLabel)) {
|
||||
this.recordProblem(context, `invalid variant label`);
|
||||
}
|
||||
labels.add(variantLabel);
|
||||
this.checkPattern(new Set(), pattern, context, ValueAvailability.AVAILABLE);
|
||||
});
|
||||
break;
|
||||
}
|
||||
case 'and': {
|
||||
const ps = [def.pattern0, def.pattern1, ... def.patternN];
|
||||
const scope = new Set<string>();
|
||||
ps.forEach((p) => this.checkNamedPattern(scope, p, name.description!));
|
||||
break;
|
||||
}
|
||||
case 'Pattern':
|
||||
this.checkPattern(
|
||||
new Set(), def.value, name.description!, ValueAvailability.AVAILABLE);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
checkNamedPattern(scope: Set<string>, p: M.NamedPattern, context: string): void
|
||||
{
|
||||
switch (p._variant) {
|
||||
case 'named': {
|
||||
this.checkBinding(scope, p.value.name, context);
|
||||
this.checkPattern(scope,
|
||||
M.Pattern.SimplePattern(p.value.pattern),
|
||||
`${JSON.stringify(p.value.name.description!)} of ${context}`,
|
||||
ValueAvailability.AVAILABLE);
|
||||
break;
|
||||
}
|
||||
case 'anonymous':
|
||||
this.checkPattern(scope, p.value, context, ValueAvailability.NOT_AVAILABLE);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
checkPattern(scope: Set<string>,
|
||||
p: M.Pattern,
|
||||
context: string,
|
||||
availability: ValueAvailability): void
|
||||
{
|
||||
switch (p._variant) {
|
||||
case 'SimplePattern':
|
||||
if (p.value._variant !== 'lit' && availability === ValueAvailability.NOT_AVAILABLE) {
|
||||
this.recordProblem(context, 'cannot recover serialization of non-literal pattern');
|
||||
}
|
||||
if (p.value._variant === 'Ref' &&
|
||||
!(M.isValidToken(p.value.value.name.description!) &&
|
||||
p.value.value.module.every(n => M.isValidToken(n.description!))))
|
||||
{
|
||||
this.recordProblem(context, 'invalid reference name');
|
||||
}
|
||||
break;
|
||||
case 'CompoundPattern':
|
||||
((p: M.CompoundPattern): void => {
|
||||
switch (p._variant) {
|
||||
case 'rec':
|
||||
this.checkNamedPattern(scope, p.label, `label of ${context}`);
|
||||
this.checkNamedPattern(scope, p.fields, `fields of ${context}`);
|
||||
break;
|
||||
case 'tuple':
|
||||
p.patterns.forEach((pp, i) =>
|
||||
this.checkNamedPattern(scope, pp, `item ${i} of ${context}`));
|
||||
break;
|
||||
case 'tuplePrefix':
|
||||
p.fixed.forEach((pp, i) =>
|
||||
this.checkNamedPattern(scope, pp, `item ${i} of ${context}`));
|
||||
this.checkNamedPattern(
|
||||
scope, M.promoteNamedSimplePattern(p.variable), `tail of ${context}`);
|
||||
break;
|
||||
case 'dict':
|
||||
p.entries.forEach((np, key) =>
|
||||
this.checkNamedPattern(
|
||||
scope,
|
||||
M.promoteNamedSimplePattern(np),
|
||||
`entry ${key.asPreservesText()} in dictionary in ${context}`));
|
||||
break;
|
||||
}
|
||||
})(p.value);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,99 +0,0 @@
|
|||
import { stringify } from "@preserves/core";
|
||||
import * as M from "./meta";
|
||||
import { CompilerOptions, ModuleContext } from "./compiler/context";
|
||||
import { Formatter, block, seq } from "./compiler/block";
|
||||
import { typeForDefinition } from "./compiler/gentype";
|
||||
import { converterForDefinition } from "./compiler/genconverter";
|
||||
import { renderType } from "./compiler/rendertype";
|
||||
import { genConstructor } from "./compiler/genctor";
|
||||
import { unconverterForDefinition } from "./compiler/genunconverter";
|
||||
import { sourceCodeFor } from "./compiler/value";
|
||||
|
||||
export function compile(
|
||||
env: M.Environment,
|
||||
modulePath: M.ModulePath,
|
||||
schema: M.Schema,
|
||||
options: CompilerOptions = {},
|
||||
): string {
|
||||
const mod = new ModuleContext(env, modulePath, schema, options);
|
||||
|
||||
const embeddedName = schema.embeddedType;
|
||||
if (embeddedName._variant !== 'false') {
|
||||
mod.defineType(seq(`export type _embedded = `, mod.embeddedType, `;`));
|
||||
}
|
||||
|
||||
for (const [name, def] of schema.definitions) {
|
||||
const t = typeForDefinition(mod.resolver(), def);
|
||||
const nameStr = stringify(name);
|
||||
const resultTypeItem = nameStr + mod.genericArgsFor(t);
|
||||
|
||||
mod.defineType(seq(`export type ${nameStr}`, mod.genericParametersFor(t),
|
||||
` = `, renderType(mod, t), `;`));
|
||||
|
||||
if (t.kind === 'union') {
|
||||
mod.defineFunction(_ctx =>
|
||||
seq(`export namespace ${nameStr} `, block(
|
||||
... Array.from(t.variants).map(([vn, vt]) =>
|
||||
genConstructor(mod, vn, vn, vt, t, resultTypeItem))
|
||||
)));
|
||||
} else {
|
||||
mod.defineFunction(_ctx =>
|
||||
genConstructor(mod, nameStr, void 0, t, t, resultTypeItem));
|
||||
}
|
||||
}
|
||||
|
||||
for (const [name0, def] of schema.definitions) {
|
||||
const t = typeForDefinition(mod.resolver(), def);
|
||||
const name = name0 as symbol;
|
||||
|
||||
mod.defineFunction(ctx =>
|
||||
seq(`export function as${name.description!}`, mod.genericParameters(),
|
||||
`(v: _.Value<_embedded>): `, name.description!, mod.genericArgsFor(t), ` `,
|
||||
ctx.block(() => [
|
||||
seq(`let result = to${name.description!}(v)`),
|
||||
seq(`if (result === void 0) `,
|
||||
`throw new TypeError(\`Invalid ${name.description!}: \${_.stringify(v)}\`)`),
|
||||
seq(`return result`)])));
|
||||
|
||||
mod.defineFunction(ctx =>
|
||||
seq(`export function to${name.description!}`, mod.genericParameters(),
|
||||
`(v: _.Value<_embedded>): undefined | `, name.description!, mod.genericArgsFor(t), ` `,
|
||||
ctx.block(() => [seq(`let result: undefined | `, name.description!, mod.genericArgsFor(t)),
|
||||
... converterForDefinition(ctx, def, `v`, `result`),
|
||||
seq(`return result`)])));
|
||||
|
||||
mod.defineFunction(ctx =>
|
||||
seq(`export function from${name.description!}`, mod.genericParameters(),
|
||||
`(_v: `, name.description!, mod.genericArgsFor(t), `): _.Value<_embedded> `,
|
||||
ctx.block(() => unconverterForDefinition(ctx, def, `_v`))));
|
||||
}
|
||||
|
||||
const f = new Formatter();
|
||||
f.write(`import * as _ from ${JSON.stringify(options.preservesModule ?? '@preserves/core')};\n`);
|
||||
mod.imports.forEach(([identifier, path]) => {
|
||||
f.write(`import * as ${identifier} from ${JSON.stringify(path)};\n`);
|
||||
});
|
||||
f.newline();
|
||||
|
||||
const sortedLiterals = Array.from(mod.literals);
|
||||
sortedLiterals.sort((a, b) => a[1] < b[1] ? -1 : a[1] === b[1] ? 0 : 1);
|
||||
for (const [lit, varname] of sortedLiterals) {
|
||||
f.write(seq(`export const ${varname} = `, sourceCodeFor(lit), `;\n`));
|
||||
}
|
||||
f.newline();
|
||||
|
||||
mod.typedefs.forEach(t => {
|
||||
f.write(t);
|
||||
f.newline();
|
||||
f.newline();
|
||||
});
|
||||
f.newline();
|
||||
|
||||
mod.functiondefs.forEach(p => {
|
||||
f.write(p);
|
||||
f.newline();
|
||||
f.newline();
|
||||
});
|
||||
|
||||
return f.toString();
|
||||
}
|
|
@ -1,177 +0,0 @@
|
|||
export type Item = Emittable | string;
|
||||
|
||||
export const DEFAULT_WIDTH = 80;
|
||||
|
||||
export class Formatter {
|
||||
width = DEFAULT_WIDTH;
|
||||
indentDelta = ' ';
|
||||
currentIndent = '\n';
|
||||
buffer: Array<string> = [];
|
||||
|
||||
get indentSize(): number { return this.indentDelta.length; }
|
||||
set indentSize(n: number) { this.indentDelta = new Array(n + 1).join(' '); }
|
||||
|
||||
write(i: Item) {
|
||||
if (typeof i === 'string') {
|
||||
this.buffer.push(i);
|
||||
} else {
|
||||
i.writeOn(this);
|
||||
}
|
||||
}
|
||||
|
||||
newline() {
|
||||
this.write(this.currentIndent);
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.buffer.join('');
|
||||
}
|
||||
|
||||
withIndent(f: () => void): void {
|
||||
const oldIndent = this.currentIndent;
|
||||
try {
|
||||
this.currentIndent = this.currentIndent + this.indentDelta;
|
||||
f();
|
||||
} finally {
|
||||
this.currentIndent = oldIndent;
|
||||
}
|
||||
}
|
||||
|
||||
clone(): Formatter {
|
||||
const f = Object.assign(new Formatter(), this);
|
||||
f.buffer = [];
|
||||
return f;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatItems(i: Item[], width = DEFAULT_WIDTH): string {
|
||||
const f = new Formatter();
|
||||
f.width = width;
|
||||
i.forEach(i => f.write(i));
|
||||
return f.toString();
|
||||
}
|
||||
|
||||
export interface Emittable {
|
||||
writeOn(f: Formatter): void;
|
||||
}
|
||||
|
||||
export class Sequence implements Emittable {
|
||||
items: Array<Item>;
|
||||
|
||||
constructor(items: Array<Item>) {
|
||||
if (items.some(i => i === void 0)) throw new Error('aiee');
|
||||
this.items = items;
|
||||
}
|
||||
|
||||
get separator(): string { return ''; }
|
||||
get terminator(): string { return ''; }
|
||||
|
||||
writeOn(f: Formatter): void {
|
||||
let needSeparator = false;
|
||||
this.items.forEach(i => {
|
||||
if (needSeparator) {
|
||||
f.write(this.separator);
|
||||
} else {
|
||||
needSeparator = true;
|
||||
}
|
||||
f.write(i);
|
||||
});
|
||||
f.write(this.terminator);
|
||||
}
|
||||
}
|
||||
|
||||
export class CommaSequence extends Sequence {
|
||||
get separator(): string { return ', '; }
|
||||
}
|
||||
|
||||
export abstract class Grouping extends CommaSequence {
|
||||
abstract get open(): string;
|
||||
abstract get close(): string;
|
||||
|
||||
writeHorizontally(f: Formatter): void {
|
||||
f.write(this.open);
|
||||
super.writeOn(f);
|
||||
f.write(this.close);
|
||||
}
|
||||
|
||||
writeVertically(f: Formatter): void {
|
||||
f.write(this.open);
|
||||
if (this.items.length > 0) {
|
||||
f.withIndent(() => {
|
||||
this.items.forEach((i, index) => {
|
||||
f.newline();
|
||||
f.write(i);
|
||||
const delim = index === this.items.length - 1 ? this.terminator : this.separator;
|
||||
f.write(delim.trimRight());
|
||||
});
|
||||
});
|
||||
f.newline();
|
||||
}
|
||||
f.write(this.close);
|
||||
}
|
||||
|
||||
writeOn(f: Formatter): void {
|
||||
const g = f.clone();
|
||||
this.writeHorizontally(g);
|
||||
const s = g.toString();
|
||||
if (s.length <= f.width) {
|
||||
f.write(s);
|
||||
} else {
|
||||
this.writeVertically(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class Parens extends Grouping {
|
||||
get open(): string { return '('; }
|
||||
get close(): string { return ')'; }
|
||||
}
|
||||
|
||||
export class OperatorSequence extends Parens {
|
||||
operator: string;
|
||||
|
||||
constructor(operator: string, items: Array<Item>) {
|
||||
super(items);
|
||||
this.operator = operator;
|
||||
}
|
||||
|
||||
get separator(): string { return this.operator; }
|
||||
}
|
||||
|
||||
export class Brackets extends Grouping {
|
||||
get open(): string { return '['; }
|
||||
get close(): string { return ']'; }
|
||||
}
|
||||
|
||||
export class AngleBrackets extends Grouping {
|
||||
get open(): string { return '<'; }
|
||||
get close(): string { return '>'; }
|
||||
}
|
||||
|
||||
export class Braces extends Grouping {
|
||||
get open(): string { return '{'; }
|
||||
get close(): string { return '}'; }
|
||||
}
|
||||
|
||||
export class Block extends Braces {
|
||||
get separator(): string { return '; ' }
|
||||
get terminator(): string { return ';' }
|
||||
}
|
||||
|
||||
export const seq = (... items: Item[]) => new Sequence(items);
|
||||
export const commas = (... items: Item[]) => new CommaSequence(items);
|
||||
export const parens = (... items: Item[]) => new Parens(items);
|
||||
export const opseq = (zero: string, op: string, ... items: Item[]) =>
|
||||
(items.length === 0) ? zero : new OperatorSequence(op, items);
|
||||
export const brackets = (... items: Item[]) => new Brackets(items);
|
||||
export const anglebrackets = (... items: Item[]) => new AngleBrackets(items);
|
||||
export const braces = (... items: Item[]) => new Braces(items);
|
||||
export const block = (... items: Item[]) => {
|
||||
if (items.length === 1 && items[0] instanceof Block) {
|
||||
return items[0];
|
||||
} else {
|
||||
return new Block(items);
|
||||
}
|
||||
}
|
||||
export const fnblock = (... items: Item[]) => seq('((() => ', block(... items), ')())');
|
||||
export const keyvalue = (k: string, v: Item) => seq(JSON.stringify(k), ': ', v);
|
|
@ -1,291 +0,0 @@
|
|||
import { Dictionary, KeyedSet, FlexSet, Position, stringify, is } from "@preserves/core";
|
||||
import { refPosition } from "../reader";
|
||||
import * as M from "../meta";
|
||||
import { anglebrackets, block, braces, commas, formatItems, Item, keyvalue, seq } from "./block";
|
||||
import { ANY_TYPE, RefType, Type } from "./type";
|
||||
import { renderType, variantInitFor } from "./rendertype";
|
||||
import { typeForDefinition } from "./gentype";
|
||||
import { SchemaSyntaxError } from "../error";
|
||||
|
||||
export interface CompilerOptions {
|
||||
preservesModule?: string;
|
||||
defaultEmbeddedType?: M.Ref;
|
||||
warn?(message: string, pos: Position | null): void;
|
||||
}
|
||||
|
||||
export interface Capture {
|
||||
fieldName: string;
|
||||
sourceExpr: string;
|
||||
}
|
||||
|
||||
export const RECURSION_LIMIT = 128;
|
||||
|
||||
export class ModuleContext {
|
||||
readonly env: M.Environment;
|
||||
readonly modulePath: M.ModulePath;
|
||||
readonly schema: M.Schema;
|
||||
readonly options: CompilerOptions;
|
||||
readonly embeddedType: Item;
|
||||
|
||||
readonly literals = new Dictionary<M.InputEmbedded, string>();
|
||||
readonly typedefs: Item[] = [];
|
||||
readonly functiondefs: Item[] = [];
|
||||
readonly imports = new KeyedSet<[string, string]>();
|
||||
|
||||
constructor(
|
||||
env: M.Environment,
|
||||
modulePath: M.ModulePath,
|
||||
schema: M.Schema,
|
||||
options: CompilerOptions,
|
||||
) {
|
||||
this.env = env;
|
||||
this.modulePath = modulePath;
|
||||
this.schema = schema;
|
||||
this.options = options;
|
||||
switch (schema.embeddedType._variant) {
|
||||
case 'false':
|
||||
this.embeddedType = '_.GenericEmbedded';
|
||||
break;
|
||||
case 'Ref': {
|
||||
const t = this.resolver()(schema.embeddedType.value);
|
||||
this.embeddedType = t.typeName;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
literal(v: M.Input): Item {
|
||||
let varname = this.literals.get(v);
|
||||
if (varname === void 0) {
|
||||
varname = M.jsId('$' + v.asPreservesText(), () => '__lit' + this.literals.size);
|
||||
this.literals.set(v, varname);
|
||||
}
|
||||
return varname;
|
||||
}
|
||||
|
||||
derefPattern(p: M.Definition, refCount = 0): M.Definition {
|
||||
if (refCount > RECURSION_LIMIT) {
|
||||
throw new Error('Recursion limit exceeded');
|
||||
}
|
||||
if (p._variant === 'Pattern' &&
|
||||
p.value._variant === 'SimplePattern' &&
|
||||
p.value.value._variant === 'Ref')
|
||||
{
|
||||
return this.lookup(p.value.value.value,
|
||||
(p, _t) => this.derefPattern(p, refCount + 1),
|
||||
(_modId, _modPath, pp, _tt) => this.derefPattern(pp ?? p, refCount + 1));
|
||||
} else {
|
||||
return p;
|
||||
}
|
||||
}
|
||||
|
||||
defineType(f: Item): void {
|
||||
this.typedefs.push(f);
|
||||
}
|
||||
|
||||
defineFunction(f: (ctx: FunctionContext) => Item): void {
|
||||
this.functiondefs.push(f(new FunctionContext(this)));
|
||||
}
|
||||
|
||||
resolver(modulePath?: M.ModulePath): (ref: M.Ref) => RefType {
|
||||
return (ref) => this.lookup(ref,
|
||||
(_p, _t) => Type.ref(ref.name.description!, ref),
|
||||
(modId, modPath, _p, _t) => {
|
||||
this.imports.add([modId, modPath]);
|
||||
return Type.ref(`${modId}.${ref.name.description!}`, ref);
|
||||
},
|
||||
modulePath);
|
||||
}
|
||||
|
||||
lookupType(name: M.Ref, modulePath?: M.ModulePath): Type | null {
|
||||
const t = this.lookup(name, (_p, t) => t, (_modId, _modPath, _p, t) => t, modulePath);
|
||||
return t ? t() : null;
|
||||
}
|
||||
|
||||
lookup<R>(name: M.Ref,
|
||||
kLocal: (p: M.Definition, t: () => Type) => R,
|
||||
kOther: (modId: string, modPath: string, p: M.Definition | null, t: (() => Type) | null) => R,
|
||||
modulePath?: M.ModulePath): R
|
||||
{
|
||||
const soughtModule = name.module.length ? name.module : (modulePath ?? this.modulePath);
|
||||
|
||||
for (const e of this.env) {
|
||||
if (is(e.schemaModulePath, soughtModule)) {
|
||||
if (e.schema === null) {
|
||||
// It's an artificial module, not from a schema. Assume the identifier is present.
|
||||
return kOther(M.modsymFor(e), e.typescriptModulePath, null, null);
|
||||
} else {
|
||||
const p = e.schema.definitions.get(name.name);
|
||||
if (p !== void 0) {
|
||||
let t = () => typeForDefinition(this.resolver(soughtModule), p);
|
||||
if (name.module.length) {
|
||||
return kOther(M.modsymFor(e), e.typescriptModulePath, p, t);
|
||||
} else {
|
||||
return kLocal(p, t);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw new SchemaSyntaxError(`Undefined reference: ${M.formatRef(name)}`, refPosition(name));
|
||||
}
|
||||
|
||||
genericParameters(): Item {
|
||||
return anglebrackets(seq('_embedded = ', this.embeddedType));
|
||||
}
|
||||
|
||||
genericParametersFor(t: Type): Item {
|
||||
return this.hasEmbedded(t) ? this.genericParameters() : '';
|
||||
}
|
||||
|
||||
genericArgs(): Item {
|
||||
return `<_embedded>`;
|
||||
}
|
||||
|
||||
genericArgsFor(t: Type): Item {
|
||||
return this.hasEmbedded(t) ? this.genericArgs() : '';
|
||||
}
|
||||
|
||||
hasEmbedded(t: Type): boolean {
|
||||
const self = this;
|
||||
const state = new WalkState(this.modulePath);
|
||||
|
||||
function walk(t: Type): boolean {
|
||||
switch (t.kind) {
|
||||
case 'union':
|
||||
for (const v of t.variants.values()) { if (walk(v)) return true; };
|
||||
return false;
|
||||
case 'unit': return false;
|
||||
case 'array': return walk(t.type);
|
||||
case 'set': return true; // because ref to _embedded in renderType()
|
||||
case 'dictionary': return true; // because ref to _embedded in renderType()
|
||||
case 'ref': {
|
||||
if (t.ref === null) {
|
||||
switch (t.typeName) {
|
||||
case '_embedded': return true;
|
||||
case '_.Value': return true;
|
||||
default: return false;
|
||||
}
|
||||
} else {
|
||||
return state.cycleCheck(
|
||||
t.ref,
|
||||
ref => self.lookupType(ref, state.modulePath),
|
||||
t => t ? walk(t) : false,
|
||||
() => false);
|
||||
}
|
||||
}
|
||||
case 'record':
|
||||
for (const v of t.fields.values()) { if (walk(v)) return true; };
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return walk(t);
|
||||
}
|
||||
}
|
||||
|
||||
export class FunctionContext {
|
||||
readonly mod: ModuleContext;
|
||||
|
||||
tempCounter = 0;
|
||||
temps: Map<string, { type: Item, names: string[] }> = new Map();
|
||||
|
||||
captures: Capture[] = [];
|
||||
variantName: string | undefined = void 0;
|
||||
|
||||
constructor(mod: ModuleContext) {
|
||||
this.mod = mod;
|
||||
}
|
||||
|
||||
gentempname(): string {
|
||||
return '_tmp' + this.tempCounter++;
|
||||
}
|
||||
|
||||
gentemp(vartype: Type = ANY_TYPE): string {
|
||||
const typeitem = renderType(this.mod, vartype);
|
||||
const typestr = formatItems([typeitem], Infinity);
|
||||
const varname = this.gentempname();
|
||||
let e = this.temps.get(typestr);
|
||||
if (e === void 0) {
|
||||
e = { type: typeitem, names: [] };
|
||||
this.temps.set(typestr, e);
|
||||
}
|
||||
e.names.push(varname);
|
||||
return varname;
|
||||
}
|
||||
|
||||
block(f: () => Item[]): Item {
|
||||
const oldTemps = this.temps;
|
||||
this.temps = new Map();
|
||||
const items = f();
|
||||
const ts = this.temps;
|
||||
this.temps = oldTemps;
|
||||
return block(
|
||||
... Array.from(ts).map(([_typestr, { type, names }]) =>
|
||||
seq(`let `, commas(... names), `: (`, type, `) | undefined`)),
|
||||
... items);
|
||||
}
|
||||
|
||||
withCapture<R>(
|
||||
fieldName: string | undefined, sourceExpr: string, ks: (sourceExpr: string) => R): R
|
||||
{
|
||||
if (fieldName !== void 0) this.captures.push({ fieldName, sourceExpr });
|
||||
const result = ks(sourceExpr);
|
||||
if (fieldName !== void 0) this.captures.pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
convertCapture(
|
||||
fieldName: string | undefined, sourceExpr: string, ks: () => Item[]): Item
|
||||
{
|
||||
return this.withCapture(fieldName, sourceExpr, sourceExpr =>
|
||||
seq(`if (${sourceExpr} !== void 0) `, this.block(() => ks())));
|
||||
}
|
||||
|
||||
buildCapturedCompound(dest: string): Item {
|
||||
const fields = [
|
||||
... variantInitFor(this.variantName),
|
||||
... this.captures.map(({ fieldName, sourceExpr }) =>
|
||||
keyvalue(fieldName, sourceExpr))
|
||||
];
|
||||
return seq(`${dest} = `, fields.length === 0 ? `null` : braces(... fields));
|
||||
}
|
||||
}
|
||||
|
||||
// Tracks the module path currently in effect and the set of refs already
// visited while walking type graphs, so recursive definitions terminate.
export class WalkState {
    modulePath: M.ModulePath;
    readonly seen: FlexSet<M.Ref>;

    constructor(modulePath: M.ModulePath) {
        this.modulePath = modulePath;
        this.seen = new FlexSet(refCanonicalizer);
    }

    // Visit `r0` unless already seen. `step` resolves the (absolutized) ref,
    // `ks` continues with the resolution — with `modulePath` temporarily set
    // to the ref's module — and `kf` supplies the answer for a cycle.
    cycleCheck<E, R>(
        r0: M.Ref,
        step: (ref: M.Ref) => E,
        ks: (e: E) => R,
        kf: () => R,
    ): R {
        // A relative ref (empty module path) resolves against the current module.
        const r = M.Ref({
            module: r0.module.length ? r0.module : this.modulePath,
            name: r0.name
        });
        if (this.seen.has(r)) {
            return kf();
        } else {
            this.seen.add(r);
            const maybe_e = step(r);
            const saved = this.modulePath;
            this.modulePath = r.module;
            const result = ks(maybe_e);
            this.modulePath = saved;
            return result;
        }
    }
}
|
||||
|
||||
function refCanonicalizer(r: M.Ref): string {
|
||||
return stringify([... r.module, r.name]);
|
||||
}
|
|
@ -1,237 +0,0 @@
|
|||
import { FunctionContext } from "./context";
|
||||
import * as M from '../meta';
|
||||
import { Item, seq } from "./block";
|
||||
import { simpleType, typeFor } from "./gentype";
|
||||
import { ANY_TYPE, Type } from "./type";
|
||||
|
||||
// Emit statements that parse `src` (a Preserves value) according to schema
// definition `p`, leaving the decoded result in `dest` (or `void 0` on failure).
export function converterForDefinition(
    ctx: FunctionContext,
    p: M.Definition,
    src: string,
    dest: string): Item[]
{
    switch (p._variant) {
        case 'or': {
            // Try each alternative in order; fall through to the next while
            // `dest` is still undefined after the previous attempt.
            const alts = [p.pattern0, p.pattern1, ... p.patternN];
            function loop(i: number): Item[] {
                ctx.variantName = alts[i].variantLabel;
                return [... converterForPattern(ctx, alts[i].pattern, src, dest),
                        ... ((i < alts.length - 1)
                            ? [seq(`if (${dest} === void 0) `, ctx.block(() => loop(i + 1)))]
                            : [])];
            }
            return loop(0);
        }
        case 'and': {
            // Every conjunct must match; captures accumulate across them and
            // the compound result is built once all have succeeded.
            const pcs = [p.pattern0, p.pattern1, ... p.patternN];
            function loop(i: number): Item[] {
                return (i < pcs.length)
                    ? converterFor(ctx, pcs[i], src, () => loop(i + 1))
                    : [ctx.buildCapturedCompound(dest)];
            }
            return loop(0);
        }
        case 'Pattern':
            ctx.variantName = void 0;
            return converterForPattern(ctx, p.value, src, dest);
    }
}
|
||||
|
||||
// Wrap a bare pattern as an anonymous named pattern, emit its converter, and
// decide how the final value is assigned to `dest`.
function converterForPattern(
    ctx: FunctionContext,
    p: M.Pattern,
    src: string,
    dest: string): Item[]
{
    return converterFor(ctx, M.NamedPattern.anonymous(p), src, simpleValue => {
        if (simpleValue === void 0) {
            // Compound pattern: assemble the result from the captured fields.
            return [ctx.buildCapturedCompound(dest)];
        } else if (ctx.variantName !== void 0) {
            // Inside a union alternative: unit-typed payloads need no `value` field.
            if (typeFor(ctx.mod.resolver(), p).kind === 'unit') {
                return [ctx.buildCapturedCompound(dest)];
            } else {
                return [ctx.withCapture('value',
                                        simpleValue,
                                        () => ctx.buildCapturedCompound(dest))];
            }
        } else {
            // Plain simple pattern: the decoded value is the result itself.
            return [`${dest} = ${simpleValue}`];
        }
    });
}
|
||||
|
||||
// Emit code matching a fixed tuple (`ps`) with an optional trailing variable
// pattern against `src`. `knownArray` skips the array/length check when the
// caller has already established it; `k` is the success continuation.
function converterForTuple(ctx: FunctionContext,
                           ps: M.NamedPattern[],
                           src: string,
                           knownArray: boolean,
                           variablePattern: M.NamedSimplePattern | undefined,
                           k: () => Item[]): Item[]
{
    function loop(i: number): Item[] {
        if (i < ps.length) {
            return converterFor(ctx, ps[i], `${src}[${i}]`, () => loop(i + 1));
        } else {
            if (variablePattern === void 0) {
                return k();
            } else {
                // Bind the remainder of the tuple and match it as an array.
                const vN = ctx.gentemp(Type.array(ANY_TYPE));
                return [ps.length > 0 ? `${vN} = ${src}.slice(${ps.length})` : `${vN} = ${src}`,
                        ... converterFor(ctx, M.promoteNamedSimplePattern(variablePattern), vN, k, true)];
            }
        }
    }

    // Exact length for fixed tuples; minimum length when a variable tail exists.
    const lengthCheck = variablePattern === void 0
        ? seq(` && ${src}.length === ${ps.length}`)
        : ((ps.length === 0) ? '' : seq(` && ${src}.length >= ${ps.length}`));

    return knownArray
        ? loop(0)
        : [seq(`if (_.Array.isArray(${src})`, lengthCheck, `) `, ctx.block(() => loop(0)))];
}
|
||||
|
||||
// Emit code matching named pattern `np` against `src`. The continuation `ks`
// receives the temp holding the decoded value for simple patterns, or
// `void 0` for compound patterns (whose result comes from captures).
function converterFor(
    ctx: FunctionContext,
    np: M.NamedPattern,
    src: string,
    ks: (dest: string | undefined) => Item[],
    knownArray = false): Item[]
{
    let p = M.unnamePattern(np);
    let maybeName = M.nameFor(np);

    if (p._variant === 'SimplePattern') {
        const dest = ctx.gentemp(simpleType(ctx.mod.resolver(), p.value));
        return [... converterForSimple(ctx, p.value, src, dest, knownArray),
                ctx.convertCapture(maybeName, dest, () => ks(dest))];
    } else {
        return converterForCompound(ctx, p.value, src, knownArray, () => ks(void 0));
    }
}
|
||||
|
||||
// Emit statements decoding simple pattern `p` from `src` into `dest`, setting
// `dest` to `void 0` on mismatch. `knownArray` elides the Array check for
// `seqof` when the caller has already proven it.
export function converterForSimple(
    ctx: FunctionContext,
    p: M.SimplePattern,
    src: string,
    dest: string,
    knownArray: boolean): Item[]
{
    switch (p._variant) {
        case 'any':
            return [`${dest} = ${src}`];
        case 'atom': {
            // `test` guards the assignment; `valexp` unwraps boxed floats.
            let test: Item;
            let valexp: Item = `${src}`;
            switch (p.atomKind._variant) {
                case 'Boolean': test = `typeof ${src} === 'boolean'`; break;
                case 'Float': test = `_.Float.isSingle(${src})`; valexp = `${src}.value`; break;
                case 'Double': test = `_.Float.isDouble(${src})`; valexp = `${src}.value`; break;
                case 'SignedInteger': test = `typeof ${src} === 'number'`; break;
                case 'String': test = `typeof ${src} === 'string'`; break;
                case 'ByteString': test = `_.Bytes.isBytes(${src})`; break;
                case 'Symbol': test = `typeof ${src} === 'symbol'`; break;
            }
            return [seq(`${dest} = `, test, ` ? `, valexp, ` : void 0`)];
        }
        case 'embedded':
            return [`${dest} = _.isEmbedded<_embedded>(${src}) ? ${src}.embeddedValue : void 0`];
        case 'lit':
            // Literals decode to `null` (unit); only their presence matters.
            return [`${dest} = _.is(${src}, ${ctx.mod.literal(p.value)}) ? null : void 0`];

        case 'seqof': {
            // Decode element-wise; any element failing resets `dest` and breaks.
            const kKnownArray = () => {
                const v = ctx.gentempname();
                return [
                    seq(`${dest} = []`),
                    seq(`for (const ${v} of ${src}) `, ctx.block(() => [
                        ... converterFor(ctx, M.anonymousSimplePattern(p.pattern), v, vv =>
                            [`${dest}.push(${vv})`, `continue`]),
                        seq(`${dest} = void 0`),
                        seq(`break`)]))];
            };
            if (knownArray) {
                return kKnownArray();
            } else {
                return [`${dest} = void 0`,
                        seq(`if (_.Array.isArray(${src})) `, ctx.block(kKnownArray))];
            }
        }
        case 'setof':
            return [`${dest} = void 0`,
                    seq(`if (_.Set.isSet<_embedded>(${src})) `, ctx.block(() => {
                        const v = ctx.gentempname();
                        return [
                            seq(`${dest} = new _.KeyedSet()`),
                            seq(`for (const ${v} of ${src}) `, ctx.block(() => [
                                ... converterFor(ctx, M.anonymousSimplePattern(p.pattern), v, vv =>
                                    [`${dest}.add(${vv})`, `continue`]),
                                seq(`${dest} = void 0`),
                                seq(`break`)]))];
                    }))];
        case 'dictof':
            return [`${dest} = void 0`,
                    seq(`if (_.Dictionary.isDictionary<_embedded>(${src})) `, ctx.block(() => {
                        const v = ctx.gentempname();
                        const k = ctx.gentempname();
                        return [
                            seq(`${dest} = new _.KeyedDictionary()`),
                            seq(`for (const [${k}, ${v}] of ${src}) `, ctx.block(() => [
                                ... converterFor(ctx, M.anonymousSimplePattern(p.key), k, kk =>
                                    converterFor(ctx, M.anonymousSimplePattern(p.value), v, vv =>
                                        [`${dest}.set(${kk}, ${vv})`, `continue`])),
                                seq(`${dest} = void 0`),
                                seq(`break`)]))];
                    }))];
        case 'Ref':
            // Local definitions call their `to<Name>` decoder directly; foreign
            // ones go through a module import.
            return ctx.mod.lookup(p.value,
                (_p, _t) => [`${dest} = to${p.value.name.description!}(${src})`],
                (modId, modPath, _p, _t) => {
                    ctx.mod.imports.add([modId, modPath]);
                    return [`${dest} = ${modId}.to${p.value.name.description!}(${src})`];
                });
        default:
            // Exhaustiveness check: compiling fails here if a variant is added.
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
||||
|
||||
// Emit statements matching compound pattern `p` against `src`; `ks` continues
// once every sub-pattern has matched and captured.
function converterForCompound(
    ctx: FunctionContext,
    p: M.CompoundPattern,
    src: string,
    knownArray: boolean,
    ks: () => Item[]): Item[]
{
    switch (p._variant) {
        case 'rec':
            // Match label first, then fields (a Record is array-like, hence `true`).
            return [seq(`if (_.Record.isRecord<_.Value<_embedded>, _.Tuple<_.Value<_embedded>>, _embedded>(${src})) `, ctx.block(() =>
                converterFor(ctx, p.label, `${src}.label`, () =>
                    converterFor(ctx, p.fields, src, ks, true))))];
        case 'tuple':
            return converterForTuple(ctx, p.patterns, src, knownArray, void 0, ks);
        case 'tuplePrefix':
            return converterForTuple(ctx, p.fixed, src, knownArray, p.variable, ks);
        case 'dict': {
            // Require each literal key to be present, matching its value pattern.
            const entries = Array.from(p.entries);
            function loop(i: number): Item[] {
                if (i < entries.length) {
                    const [k, n] = entries[i];
                    const tmpSrc = ctx.gentemp();
                    return [seq(`if ((${tmpSrc} = ${src}.get(${ctx.mod.literal(k)})) !== void 0) `,
                                ctx.block(() =>
                                    converterFor(
                                        ctx,
                                        M.promoteNamedSimplePattern(n),
                                        tmpSrc,
                                        () => loop(i + 1))))];
                } else {
                    return ks();
                }
            }
            return [seq(`if (_.Dictionary.isDictionary<_embedded>(${src})) `, ctx.block(() => loop(0)))];
        }
        default:
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
|
@ -1,50 +0,0 @@
|
|||
import * as M from '../meta';
|
||||
import { block, braces, Item, keyvalue, parens, seq } from "./block";
|
||||
import { FieldType, SimpleType, Type } from "./type";
|
||||
import { renderType } from "./rendertype";
|
||||
import { ModuleContext } from './context';
|
||||
|
||||
// Generate `export function <name>(...)` constructing a value of `resultType`
// (optionally tagged with `_variant: variant` for union alternatives).
export function genConstructor(
    mod: ModuleContext,
    name: string,
    variant: string | undefined,
    arg: SimpleType,
    resultType: Type,
    resultTypeItem: Item,
): Item {
    const formals: Array<[string, FieldType]> = [];
    let simpleValue = false;

    // Unit-typed fields carry no information and get no formal parameter.
    function examine(t: FieldType, name: string): void {
        if (t.kind !== 'unit') {
            formals.push([name, t]);
        }
    }

    if (arg.kind === 'record') {
        arg.fields.forEach(examine);
    } else {
        examine(arg, 'value');
        // A non-record payload outside a union is returned as-is, unwrapped.
        simpleValue = variant === void 0;
    }

    const initializers: Item[] = (variant !== void 0)
        ? [keyvalue('_variant', JSON.stringify(variant))]
        : [];
    formals.forEach(([n, _t]) => initializers.push(seq(JSON.stringify(n), ': ', M.jsId(n))));

    // Two or more parameters are taken as a destructured options object.
    const declArgs: Array<Item> = (formals.length > 1)
        ? [seq(braces(...formals.map(f => M.jsId(f[0]))), ': ',
               braces(...formals.map(f => seq(M.jsId(f[0]), ': ', renderType(mod, f[1])))))]
        : formals.map(f => seq(M.jsId(f[0]), ': ', renderType(mod, f[1])));

    return seq(`export function ${M.jsId(name)}`, mod.genericParametersFor(resultType),
               parens(... declArgs),
               ': ', resultTypeItem, ' ', block(
                   seq(`return `,
                       ((arg.kind === 'unit' && initializers.length === 0)
                           ? 'null'
                           : (simpleValue
                               ? 'value'
                               : braces(... initializers))))));
}
|
|
@ -1,97 +0,0 @@
|
|||
import * as M from "../meta";
|
||||
import { ANY_TYPE, FieldType, FieldMap, SimpleType, Type } from "./type";
|
||||
|
||||
export type RefResolver = (ref: M.Ref) => FieldType;
|
||||
|
||||
export function typeForDefinition(resolver: RefResolver, d: M.Definition): Type {
|
||||
switch (d._variant) {
|
||||
case 'or':
|
||||
return Type.union(
|
||||
new Map([d.pattern0, d.pattern1, ... d.patternN].map(a =>
|
||||
[a.variantLabel, typeFor(resolver, a.pattern)])));
|
||||
case 'and':
|
||||
return typeForIntersection(resolver, [d.pattern0, d.pattern1, ... d.patternN]);
|
||||
case 'Pattern':
|
||||
return typeFor(resolver, d.value);
|
||||
}
|
||||
}
|
||||
|
||||
export function typeForIntersection(resolver: RefResolver, ps: M.NamedPattern[]): SimpleType {
|
||||
const fs = new Map();
|
||||
ps.forEach(p => gatherFields(fs, resolver, p));
|
||||
return fs.size > 0 ? Type.record(fs) : Type.unit();
|
||||
}
|
||||
|
||||
export function typeFor(resolver: RefResolver, p: M.Pattern): SimpleType {
|
||||
if (p._variant === 'SimplePattern') {
|
||||
return simpleType(resolver, p.value);
|
||||
} else {
|
||||
return typeForIntersection(resolver, [M.NamedPattern.anonymous(p)]);
|
||||
}
|
||||
}
|
||||
|
||||
// Map a schema SimplePattern to the TypeScript type of its decoded value.
export function simpleType(resolver: RefResolver, p: M.SimplePattern): FieldType {
    switch (p._variant) {
        case 'any':
            return ANY_TYPE;
        case 'atom':
            switch (p.atomKind._variant) {
                case 'Boolean': return Type.ref(`boolean`, null);
                case 'Float': return Type.ref(`number`, null);
                case 'Double': return Type.ref(`number`, null);
                case 'SignedInteger': return Type.ref(`number`, null);
                case 'String': return Type.ref(`string`, null);
                case 'ByteString': return Type.ref(`_.Bytes`, null);
                case 'Symbol': return Type.ref(`symbol`, null);
            }
        case 'embedded':
            return Type.ref(`_embedded`, null);
        case 'lit':
            // Matched literals carry no information.
            return Type.unit();
        case 'seqof':
            return Type.array(simpleType(resolver, p.pattern));
        case 'setof':
            return Type.set(simpleType(resolver, p.pattern));
        case 'dictof':
            return Type.dictionary(simpleType(resolver, p.key), simpleType(resolver, p.value));
        case 'Ref':
            // Delegate to the caller-supplied resolver for schema-defined names.
            return resolver(p.value);
        default:
            // Exhaustiveness check.
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
||||
|
||||
// Accumulate into `fs` the named fields contributed by compound pattern `p`.
function compoundFields(fs: FieldMap, resolver: RefResolver, p: M.CompoundPattern): void {
    switch (p._variant) {
        case 'rec':
            gatherFields(fs, resolver, p.label);
            gatherFields(fs, resolver, p.fields);
            break;
        case 'tuple':
            p.patterns.forEach(pp => gatherFields(fs, resolver, pp));
            break;
        case 'tuplePrefix':
            p.fixed.forEach(pp => gatherFields(fs, resolver, pp));
            gatherFields(fs, resolver, M.promoteNamedSimplePattern(p.variable));
            break;
        case 'dict':
            // Keys are literals; only the value patterns may bind names.
            p.entries.forEach((n, _k) =>
                gatherFields(fs, resolver, M.promoteNamedSimplePattern(n)));
            break;
        default:
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
||||
|
||||
// Record in `fs` the binding introduced by `n` (unit-typed bindings are
// dropped); anonymous compound patterns are searched recursively, anonymous
// simple patterns bind nothing.
function gatherFields(fs: FieldMap, resolver: RefResolver, n: M.NamedPattern): void {
    if (n._variant === 'named') {
        const t = simpleType(resolver, n.value.pattern);
        if (t.kind !== 'unit') {
            fs.set(n.value.name.description!, t);
        }
    } else if (n.value._variant === 'CompoundPattern') {
        compoundFields(fs, resolver, n.value.value);
    }
}
|
|
@ -1,129 +0,0 @@
|
|||
import * as M from '../meta';
|
||||
import { block, brackets, Item, parens, seq } from './block';
|
||||
import { FunctionContext } from "./context";
|
||||
|
||||
// Emit the body of a `from<Name>` function: statements that serialize `src`
// back to a Preserves value according to `def`.
export function unconverterForDefinition(
    ctx: FunctionContext,
    def: M.Definition,
    src: string): Item[]
{
    switch (def._variant) {
        case 'or':
            // Dispatch on the `_variant` tag; simple-pattern alternatives keep
            // their payload in a `value` field.
            return [seq(`switch (${src}._variant) `, block(
                ... [def.pattern0, def.pattern1, ... def.patternN].map(p =>
                    seq(`case `, JSON.stringify(p.variantLabel), `: `, ctx.block(() => {
                        const hasValueField = p.pattern._variant === 'SimplePattern';
                        return [seq(`return `, unconverterFor(
                            ctx, p.pattern, hasValueField ? `${src}.value` : src))];
                    })))))];
        case 'and':
            // Re-serialize each (contributing) conjunct and merge the results;
            // identical values merge to themselves.
            return [seq(`return _.merge`, parens(
                `(a, b) => (a === b) ? a : void 0`,
                ... [def.pattern0, def.pattern1, ... def.patternN].flatMap(p => {
                    if (p._variant === 'anonymous' && p.value._variant === 'SimplePattern') {
                        // Anonymous simple conjuncts bound nothing at decode
                        // time, so they contribute nothing here.
                        return [];
                    } else {
                        return [unconverterForNamed(ctx, p, src)];
                    }
                })))];
        case 'Pattern':
            return [seq(`return `, unconverterFor(ctx, def.value, `${src}`))];
    }
}
|
||||
|
||||
// Emit an expression serializing `src` back to a Preserves value according to
// pattern `p` (the inverse of converterFor).
function unconverterFor(ctx: FunctionContext, p: M.Pattern, src: string): Item {
    switch (p._variant) {
        case 'SimplePattern':
            return ((p: M.SimplePattern) => {
                switch (p._variant) {
                    case 'any':
                        return `${src}`;
                    case 'atom':
                        // Floats are re-boxed; every other atom round-trips as-is.
                        switch (p.atomKind._variant) {
                            case 'Float': return `_.Single(${src})`;
                            case 'Double': return `_.Double(${src})`;
                            default: return `${src}`;
                        }
                    case 'lit':
                        // A matched literal decoded to null; re-emit the literal itself.
                        return ctx.mod.literal(p.value);
                    case 'embedded':
                        return `_.embed(${src})`;
                    case 'seqof':
                        return seq(`${src}.map(v => `,
                                   unconverterFor(ctx, M.Pattern.SimplePattern(p.pattern), 'v'),
                                   `)`);
                    case 'setof':
                        return seq(`new _.Set<_embedded>`, parens(
                            `_.Array.from(${src}.values()).map(v => `,
                            unconverterFor(ctx, M.Pattern.SimplePattern(p.pattern), 'v'),
                            `)`));
                    case 'dictof':
                        return seq(`new _.Dictionary<_embedded>`, parens(seq(
                            `_.Array.from(${src}.entries()).map(([k, v]) => `,
                            brackets(
                                unconverterFor(ctx, M.Pattern.SimplePattern(p.key), 'k'),
                                unconverterFor(ctx, M.Pattern.SimplePattern(p.value), 'v')),
                            `)`)));
                    case 'Ref':
                        // Local definitions call `from<Name>` directly; foreign
                        // ones go through a module import.
                        return ctx.mod.lookup(p.value,
                            (_p, _t) => `from${p.value.name.description!}${ctx.mod.genericArgs()}(${src})`,
                            (modId, modPath, _p, _t) => {
                                ctx.mod.imports.add([modId, modPath]);
                                return `${modId}.from${p.value.name.description!}${ctx.mod.genericArgs()}(${src})`;
                            });
                }
            })(p.value);
        case 'CompoundPattern':
            return ((p: M.CompoundPattern) => {
                switch (p._variant) {
                    case 'rec':
                        return seq(`_.Record`, parens(
                            unconverterForNamed(ctx, p.label, src),
                            unconverterForNamed(ctx, p.fields, src)));
                    case 'tuple':
                        return brackets(... p.patterns.map(pp =>
                            unconverterForNamed(ctx, pp, src)));
                    case 'tuplePrefix': {
                        // Fixed elements first, then the variable tail spread in.
                        const varExp =
                            unconverterForNamed(ctx, M.promoteNamedSimplePattern(p.variable), src);
                        if (p.fixed.length === 0) {
                            return varExp;
                        } else {
                            return brackets(
                                ... p.fixed.map(pp => unconverterForNamed(ctx, pp, src)),
                                seq(`... `, varExp));
                        }
                    }
                    case 'dict':
                        return seq(`new _.Dictionary<_embedded>`, parens(
                            brackets(... Array.from(p.entries.entries()).map(([k, n]) =>
                                brackets(
                                    ctx.mod.literal(k),
                                    unconverterForNamedSimple(ctx, n, src))))));
                }
            })(p.value);
    }
}
|
||||
|
||||
function stepSource(src: string, key: string): string
|
||||
{
|
||||
return `${src}[${JSON.stringify(key)}]`;
|
||||
}
|
||||
|
||||
// Serialize via named pattern `p`: named bindings read their field off `src`;
// anonymous patterns serialize `src` directly.
function unconverterForNamed(ctx: FunctionContext, p: M.NamedPattern, src: string): Item {
    if (p._variant === 'named') {
        const steppedSrc = stepSource(src, p.value.name.description!);
        return unconverterFor(ctx, M.Pattern.SimplePattern(p.value.pattern), steppedSrc);
    } else {
        return unconverterFor(ctx, p.value, src);
    }
}
|
||||
|
||||
// Like unconverterForNamed, but for patterns known to be simple (e.g. the
// value patterns of a `dict`).
function unconverterForNamedSimple(ctx: FunctionContext, p: M.NamedSimplePattern, src: string): Item {
    if (p._variant === 'named') {
        const steppedSrc = stepSource(src, p.value.name.description!);
        return unconverterFor(ctx, M.Pattern.SimplePattern(p.value.pattern), steppedSrc);
    } else {
        return unconverterFor(ctx, M.Pattern.SimplePattern(p.value), src);
    }
}
|
|
@ -1,68 +0,0 @@
|
|||
export function isJsKeyword(s: string): boolean {
|
||||
return JS_KEYWORDS.has(s);
|
||||
}
|
||||
|
||||
export const JS_KEYWORDS = new Set([
|
||||
'abstract',
|
||||
'await',
|
||||
'boolean',
|
||||
'break',
|
||||
'byte',
|
||||
'case',
|
||||
'catch',
|
||||
'char',
|
||||
'class',
|
||||
'const',
|
||||
'continue',
|
||||
'debugger',
|
||||
'default',
|
||||
'delete',
|
||||
'do',
|
||||
'double',
|
||||
'else',
|
||||
'enum',
|
||||
'export',
|
||||
'extends',
|
||||
'false',
|
||||
'final',
|
||||
'finally',
|
||||
'float',
|
||||
'for',
|
||||
'function',
|
||||
'goto',
|
||||
'if',
|
||||
'implements',
|
||||
'import',
|
||||
'in',
|
||||
'instanceof',
|
||||
'int',
|
||||
'interface',
|
||||
'let',
|
||||
'long',
|
||||
'native',
|
||||
'new',
|
||||
'null',
|
||||
'package',
|
||||
'private',
|
||||
'protected',
|
||||
'public',
|
||||
'return',
|
||||
'short',
|
||||
'static',
|
||||
'super',
|
||||
'switch',
|
||||
'synchronized',
|
||||
'this',
|
||||
'throw',
|
||||
'throws',
|
||||
'transient',
|
||||
'true',
|
||||
'try',
|
||||
'typeof',
|
||||
'var',
|
||||
'void',
|
||||
'volatile',
|
||||
'while',
|
||||
'with',
|
||||
'yield',
|
||||
]);
|
|
@ -1,60 +0,0 @@
|
|||
import { SimpleType, Type } from "./type";
|
||||
import { anglebrackets, braces, Item, keyvalue, opseq, seq } from "./block";
|
||||
import { ModuleContext } from "./context";
|
||||
|
||||
export function variantInitFor(variantName: string | undefined) : Item[] {
|
||||
return variantName === void 0 ? [] : [variantFor(variantName)];
|
||||
}
|
||||
|
||||
// A `_variant: "<name>"` key/value Item used to tag union members.
export function variantFor(variantName: string): Item {
    return keyvalue('_variant', JSON.stringify(variantName));
}
|
||||
|
||||
// Render one union alternative as an object type tagged with `_variant`.
export function renderVariant(ctxt: ModuleContext, [variantName, t]: [string, SimpleType]): Item {
    let fields: Item[];
    switch (t.kind) {
        case 'unit':
            // Unit payloads contribute the tag alone.
            fields = [];
            break;
        case 'ref':
        case 'set':
        case 'dictionary':
        case 'array':
            // Non-record payloads live in a single `value` field.
            fields = [keyvalue('value', renderType(ctxt, t))];
            break;
        case 'record':
            fields = Array.from(t.fields).map(([nn, tt]) => keyvalue(nn, renderType(ctxt, tt)));
            break;
        default:
            // Exhaustiveness check.
            ((_: never) => {})(t);
            throw new Error("Unreachable");
    }
    return braces(variantFor(variantName), ... fields);
}
|
||||
|
||||
// Render a model Type as TypeScript type syntax.
export function renderType(ctxt: ModuleContext, t: Type): Item {
    switch (t.kind) {
        // Empty unions render as `never`.
        case 'union': return opseq('never', ' | ', ...
            Array.from(t.variants).flatMap(entry => renderVariant(ctxt, entry)));
        case 'unit': return 'null';
        case 'ref':
            if (t.ref === null && t.typeName === '_embedded') {
                // The embedded type parameter itself takes no generic arguments.
                return t.typeName;
            } else {
                return seq(t.typeName, ctxt.genericArgsFor(t));
            }
        case 'set': return seq('_.KeyedSet', anglebrackets(
            renderType(ctxt, t.type),
            '_embedded'));
        case 'dictionary': return seq('_.KeyedDictionary', anglebrackets(
            renderType(ctxt, t.key),
            renderType(ctxt, t.value),
            '_embedded'));
        case 'array': return seq('Array', anglebrackets(renderType(ctxt, t.type)));
        case 'record': return braces(... Array.from(t.fields).map(([nn, tt]) =>
            keyvalue(nn, renderType(ctxt, tt))));
        default:
            // Exhaustiveness check.
            ((_: never) => {})(t);
            throw new Error("Unreachable");
    }
}
|
|
@ -1,37 +0,0 @@
|
|||
import * as M from '../meta';
|
||||
|
||||
// Internal model of the TypeScript types generated from schema definitions.
export type Type =
    | { kind: 'union', variants: VariantMap } // zero: never
    | SimpleType

export type SimpleType = FieldType | RecordType

// Types permitted as record fields (records themselves do not nest here).
export type FieldType =
    | { kind: 'unit' }
    | { kind: 'array', type: FieldType }
    | { kind: 'set', type: FieldType }
    | { kind: 'dictionary', key: FieldType, value: FieldType }
    | RefType

export type RefType =
    | { kind: 'ref', typeName: string, ref: M.Ref | null } // ref === null for base types

export type RecordType =
    | { kind: 'record', fields: FieldMap }

export type VariantMap = Map<string, SimpleType>;
export type FieldMap = Map<string, FieldType>;

// Smart constructors for the variants above.
export namespace Type {
    export const union = (variants: VariantMap): Type => ({ kind: 'union', variants });
    export const unit = (): FieldType => ({ kind: 'unit' });
    export const ref = (typeName: string, ref: M.Ref | null): RefType => (
        { kind: 'ref', typeName, ref });
    export const array = (type: FieldType): FieldType => ({ kind: 'array', type });
    export const set = (type: FieldType): FieldType => ({ kind: 'set', type });
    export const dictionary = (key: FieldType, value: FieldType): FieldType => (
        { kind: 'dictionary', key, value });
    export const record = (fields: FieldMap): RecordType => ({ kind: 'record', fields });
}

// The fully-general Preserves value type.
export const ANY_TYPE: FieldType = Type.ref('_.Value', null);
|
|
@ -1,39 +0,0 @@
|
|||
import { Annotated, Bytes, Set, Dictionary, Fold, fold, Record, Tuple, Value, stringify, Embedded } from "@preserves/core";
|
||||
import { brackets, Item, parens, seq } from "./block";
|
||||
import * as M from '../meta';
|
||||
|
||||
// Render a Preserves value as TypeScript source code that reconstructs it at
// runtime (used to embed schema literals in generated modules).
export function sourceCodeFor(v: Value<M.InputEmbedded>): Item {
    return fold(v, {
        boolean(b: boolean): Item { return b.toString(); },
        single(f: number): Item { return f.toString(); },
        double(f: number): Item { return f.toString(); },
        integer(i: number): Item { return i.toString(); },
        string(s: string): Item { return JSON.stringify(s); },
        bytes(b: Bytes): Item {
            return seq(`Uint8Array.from(`, brackets(... Array.from(b).map(b => b.toString())), `)`);
        },
        symbol(s: symbol): Item { return `Symbol.for(${JSON.stringify(s.description!)})`; },

        record(r: Record<Value<M.InputEmbedded>, Tuple<Value<M.InputEmbedded>>, M.InputEmbedded>, k: Fold<M.InputEmbedded, Item>): Item {
            return seq(`_.Record<_.Value<_embedded>, _.Tuple<_.Value<_embedded>>, _embedded>`, parens(k(r.label), brackets(... r.map(k))));
        },
        array(a: Array<Value<M.InputEmbedded>>, k: Fold<M.InputEmbedded, Item>): Item {
            return brackets(... a.map(k));
        },
        set(s: Set<M.InputEmbedded>, k: Fold<M.InputEmbedded, Item>): Item {
            return seq('new _.Set<_.Value<_embedded>>', parens(brackets(... Array.from(s).map(k))));
        },
        dictionary(d: Dictionary<M.InputEmbedded>, k: Fold<M.InputEmbedded, Item>): Item {
            return seq('new _.Dictionary<_embedded>', parens(brackets(... Array.from(d).map(([kk,vv]) =>
                brackets(k(kk), k(vv))))));
        },

        annotated(a: Annotated<M.InputEmbedded>, k: Fold<M.InputEmbedded, Item>): Item {
            return seq('_.annotate<_embedded>', parens(k(a.item), ... a.annotations.map(k)));
        },

        // Embedded values have no static source representation; reject them.
        embedded(t: Embedded<M.InputEmbedded>, _k: Fold<M.InputEmbedded, Item>): Item {
            throw new Error(`Cannot emit source code for construction of embedded ${stringify(t)}`);
        },
    });
}
|
|
@ -1,10 +0,0 @@
|
|||
import { Position } from '@preserves/core';
|
||||
|
||||
export class SchemaSyntaxError extends Error {
|
||||
readonly pos: Position | null;
|
||||
|
||||
constructor(message: string, pos: Position | null) {
|
||||
super(message);
|
||||
this.pos = pos;
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -1,6 +0,0 @@
|
|||
export * from './checker';
|
||||
export * from './error';
|
||||
export * from './reader';
|
||||
export * from './compiler';
|
||||
export * as Meta from './meta';
|
||||
export * as Type from './compiler/type';
|
|
@ -1,87 +0,0 @@
|
|||
import { GenericEmbedded, Value } from '@preserves/core';
|
||||
import * as M from './gen/schema';
|
||||
import { isJsKeyword } from './compiler/jskw';
|
||||
|
||||
export * from './gen/schema';
|
||||
|
||||
// Raw schema source terms: plain Preserves values over generic embedded values.
export type Input = Value;
export type InputEmbedded = GenericEmbedded;
|
||||
|
||||
export function qidLast(s: string): string {
|
||||
const m = s.match(/^(.*\.)?([^.]+)$/);
|
||||
return m![2];
|
||||
}
|
||||
|
||||
export function isValidToken(s: string): boolean {
|
||||
return /^[a-zA-Z][a-zA-Z_0-9]*$/.test(s);
|
||||
}
|
||||
|
||||
export function isValidQid(s: string): boolean {
|
||||
return s.split('.').every(isValidToken);
|
||||
}
|
||||
|
||||
export function isValidJsId(s: string): boolean {
|
||||
return /^[$_a-zA-Z][$_a-zA-Z0-9]*$/.test(s) && !isJsKeyword(s);
|
||||
}
|
||||
|
||||
export function jsId(v: string, kf?: () => string): string {
|
||||
return jsToken(v.replace('$', '$$'), kf);
|
||||
}
|
||||
|
||||
// Render `s` as a JS identifier: use it directly when valid, else try a '$'
// prefix (rescuing keywords and leading digits), else defer to the fallback
// `kf` — failing loudly when none is supplied.
export function jsToken(s: string, kf?: () => string): string {
    if (isValidJsId(s)) return s;
    if (isValidJsId('$' + s)) return '$' + s;
    if (kf !== void 0) return kf();
    throw new Error(`Internal error: jsToken needs to be completed (${s})`);
}
|
||||
|
||||
// Interned symbols for the punctuation/keyword tokens of the schema surface syntax.
export const ANDSYM = Symbol.for('&');
export const DOT = Symbol.for('.');
export const DOTDOTDOT = Symbol.for('...');
export const EQUALS = Symbol.for('=');
export const INCLUDE = Symbol.for('include');
export const ORSYM = Symbol.for('/');
|
||||
|
||||
// One schema known to the compiler: its module path, the TypeScript module it
// compiles to, and the parsed schema itself.
export type SchemaEnvEntry = {
    schemaModulePath: M.ModulePath,
    typescriptModulePath: string,
    schema: M.Schema | null, // null means it's an artificial one, not corresponding to an input
};

// All schemas in scope for a compilation.
export type Environment = Array<SchemaEnvEntry>;
|
||||
|
||||
export function modsymFor(e: SchemaEnvEntry): string {
|
||||
return '_i_' + e.schemaModulePath.map(s => s.description!).join('$');
|
||||
}
|
||||
|
||||
export function formatRef(r: M.Ref): string {
|
||||
return [... r.module, r.name].map(s => s.description!).join('.');
|
||||
}
|
||||
|
||||
// Strip the binding name (if any) from a NamedPattern, yielding a bare Pattern.
export function unnamePattern(p: M.NamedPattern): M.Pattern {
    return (p._variant === 'named') ? M.Pattern.SimplePattern(p.value.pattern) : p.value;
}
|
||||
|
||||
// Strip the binding name (if any) from a NamedSimplePattern.
export function unnameSimplePattern(p: M.NamedSimplePattern): M.SimplePattern {
    return (p._variant === 'named') ? p.value.pattern : p.value;
}
|
||||
|
||||
// Lift a NamedSimplePattern into the more general NamedPattern type.
export function promoteNamedSimplePattern(p: M.NamedSimplePattern): M.NamedPattern {
    return (p._variant === 'named') ? p : M.NamedPattern.anonymous(M.Pattern.SimplePattern(p.value));
}
|
||||
|
||||
// The binding name of a named pattern, or undefined when it is anonymous.
export function nameFor(p: M.NamedSimplePattern | M.NamedPattern) : string | undefined {
    return (p._variant === 'named') ? p.value.name.description! : void 0;
}
|
||||
|
||||
// Wrap a bare SimplePattern as an anonymous NamedPattern.
export function anonymousSimplePattern(p: M.SimplePattern): M.NamedPattern {
    return M.NamedPattern.anonymous(M.Pattern.SimplePattern(p));
}
|
||||
|
||||
export function namelike(x: Input): string | undefined {
|
||||
if (typeof x === 'string') return x;
|
||||
if (typeof x === 'symbol') return x.description!;
|
||||
if (typeof x === 'number') return '' + x;
|
||||
if (typeof x === 'boolean') return '' + x;
|
||||
return void 0;
|
||||
}
|
|
@ -1,383 +0,0 @@
|
|||
import { Reader, Annotated, Dictionary, is, peel, preserves, Record, strip, Tuple, Position, position, stringify, isCompound, KeyedDictionary, annotate, annotations, isEmbedded, GenericEmbedded, genericEmbeddedTypeDecode } from '@preserves/core';
|
||||
import { Input, Pattern, Schema, Definition, CompoundPattern, SimplePattern } from './meta';
|
||||
import * as M from './meta';
|
||||
import { SchemaSyntaxError } from './error';
|
||||
import { checkSchema } from './checker';
|
||||
|
||||
// Side table mapping parsed objects to their source positions. Kept outside
// the objects themselves; WeakMap so entries die with the objects.
const positionTable = new WeakMap<object, Position>();
|
||||
|
||||
export function recordPosition<X extends object>(v: X, pos: Position | null): X {
|
||||
if (pos === null) { console.error('Internal error in Schema reader: null source position for', v); }
|
||||
if (pos !== null) positionTable.set(v, pos);
|
||||
return v;
|
||||
}
|
||||
|
||||
export function refPosition(v: object): Position | null {
|
||||
return positionTable.get(v) ?? null;
|
||||
}
|
||||
|
||||
function splitBy<T>(items: Array<T>, separator: T): Array<Array<T>> {
|
||||
const groups: Array<Array<T>> = [];
|
||||
let group: Array<T> = [];
|
||||
function finish() {
|
||||
if (group.length > 0) {
|
||||
groups.push(group);
|
||||
group = [];
|
||||
}
|
||||
}
|
||||
for (const item of items) {
|
||||
if (is(item, separator)) {
|
||||
finish();
|
||||
} else {
|
||||
group.push(item);
|
||||
}
|
||||
}
|
||||
finish();
|
||||
return groups;
|
||||
}
|
||||
|
||||
function invalidClause(clause: Array<Input>): never {
|
||||
throw new SchemaSyntaxError(preserves`Invalid Schema clause: ${clause}`,
|
||||
position(clause[0] ?? false));
|
||||
}
|
||||
|
||||
function invalidPattern(name: string, item: Input, pos: Position | null): never {
|
||||
throw new SchemaSyntaxError(`Invalid pattern in ${name}: ${stringify(item)}`, pos);
|
||||
}
|
||||
|
||||
// Options controlling schema reading.
export type SchemaReaderOptions = {
    // Source name (or starting position) used in parse-error messages.
    name?: string | Position;
    // Resolves the text of an `include`d schema file. When absent,
    // `include` clauses are rejected with an error.
    readInclude?(includePath: string): string;
};
|
||||
|
||||
function _readSchema(source: string, options?: SchemaReaderOptions): Array<Input> {
|
||||
return new Reader<GenericEmbedded>(source, {
|
||||
name: options?.name,
|
||||
includeAnnotations: true,
|
||||
embeddedDecode: genericEmbeddedTypeDecode,
|
||||
}).readToEnd();
|
||||
}
|
||||
|
||||
export function readSchema(source: string, options?: SchemaReaderOptions): Schema
|
||||
{
|
||||
const checked = checkSchema(parseSchema(_readSchema(source, options), options ?? {}));
|
||||
if (checked.ok) return checked.schema;
|
||||
throw new Error(`Schema is not invertible:\n` +
|
||||
checked.problems.map(c => ' - ' + c).join('\n'));
|
||||
}
|
||||
|
||||
// Parses a token stream (as produced by _readSchema) into a Schema value.
// Recognizes three pseudo-clauses — `version N .`, `embeddedType X .`,
// `include "path" .` — plus ordinary `name = pattern ... .` definitions.
export function parseSchema(toplevelTokens: Array<Input>, options: SchemaReaderOptions): Schema
{
    let version: M.Version | undefined = void 0;
    let embeddedType: M.EmbeddedTypeName = M.EmbeddedTypeName.$false();
    let definitions = new KeyedDictionary<symbol, Definition, M.InputEmbedded>();

    // Processes one token stream; called recursively for `include`d files,
    // which therefore share version/embeddedType/definitions state.
    function process(toplevelTokens: Array<Input>): void {
        // Clauses are `.`-separated runs of tokens.
        const toplevelClauses = splitBy(peel(toplevelTokens) as Array<Input>, M.DOT);
        for (const clause of toplevelClauses) {
            if (clause.length >= 2 && is(clause[1], M.EQUALS)) {
                // Definition clause: `name = body...`.
                const pos = position(clause[0]);
                const name = peel(clause[0]);
                if (typeof name !== 'symbol') invalidClause(clause);
                if (!M.isValidToken(name.description!)) {
                    throw new SchemaSyntaxError(preserves`Invalid definition name: ${name}`, pos);
                }
                if (definitions.has(name)) {
                    throw new SchemaSyntaxError(preserves`Duplicate definition: ${clause}`, pos);
                }
                definitions.set(name, parseDefinition(name, pos, clause.slice(2)));
            } else if (clause.length === 2 && is(clause[0], M.$version)) {
                version = M.asVersion(peel(clause[1]));
            } else if (clause.length === 2 && is(clause[0], M.$embeddedType)) {
                // `embeddedType` is either the literal #f or a qualified Ref.
                const pos = position(clause[1]);
                const stx = peel(clause[1]);
                if (stx === false) {
                    embeddedType = M.EmbeddedTypeName.$false();
                } else if (typeof stx === 'symbol' && M.isValidQid(stx.description!)) {
                    embeddedType = M.EmbeddedTypeName.Ref(parseRef(stx.description!, pos));
                } else {
                    invalidPattern('embedded type name specification', stx, pos);
                }
            } else if (clause.length === 2 && is(clause[0], M.INCLUDE)) {
                const pos = position(clause[1]);
                const path = peel(clause[1]);
                if (typeof path !== 'string') {
                    throw new SchemaSyntaxError(preserves`Invalid include: ${clause}`, pos);
                }
                if (options.readInclude === void 0) {
                    throw new SchemaSyntaxError(preserves`Cannot include files in schema`, pos);
                }
                // Recursively merge the included file's clauses.
                process(_readSchema(options.readInclude(path), options));
            } else {
                invalidClause(clause);
            }
        }
    }

    process(toplevelTokens);

    if (version === void 0) {
        throw new SchemaSyntaxError("Schema: missing version declaration.", null);
    }

    // NOTE(review): the parsed `version` is validated above but the returned
    // Schema is built with `M.Version()` rather than the `version` variable —
    // confirm this is intentional (e.g. M.Version() denoting the current
    // supported version) and not silently discarding the declared version.
    return M.Schema({ version: M.Version(), embeddedType, definitions });
}
|
||||
|
||||
function namedMustBeSimple(p: Position | null): never {
|
||||
throw new SchemaSyntaxError('Named patterns must be Simple patterns', p);
|
||||
}
|
||||
|
||||
// Parses the body of one `name = ...` definition. The body may be a plain
// pattern, an `&`-intersection, or a `/`-union; union branches must each
// acquire a variant label (explicit `@name`, record label, Ref tail, or a
// name-like literal).
function parseDefinition(name: symbol, pos: Position | null, body: Array<Input>): Definition {
    // Names one union branch, deriving a label when none is given explicitly.
    function alternativeName(input: Array<Input>): M.NamedAlternative
    {
        const n = findName(input) || findName(input[0]);
        const p = parsePattern(name, input);
        if (n !== false) {
            // Explicit `@label` annotation wins.
            return M.NamedAlternative({ variantLabel: n.description!, pattern: p });
        }
        // A record pattern with a literal symbol label: use the label.
        if (p._variant === 'CompoundPattern' &&
            p.value._variant === 'rec' &&
            p.value.label._variant === 'anonymous' &&
            p.value.label.value._variant === 'SimplePattern' &&
            p.value.label.value.value._variant === 'lit' &&
            typeof p.value.label.value.value.value === 'symbol' &&
            M.isValidToken(p.value.label.value.value.value.description!))
        {
            return M.NamedAlternative({
                variantLabel: p.value.label.value.value.value.description!,
                pattern: p
            });
        }
        // A Ref: use the last component of its qualified name.
        if (p._variant === 'SimplePattern' &&
            p.value._variant === 'Ref' &&
            M.isValidQid(p.value.value.name.description!))
        {
            return M.NamedAlternative({
                variantLabel: M.qidLast(p.value.value.name.description!),
                pattern: p
            });
        }
        // A literal with a name-like value: stringify it as the label.
        if (p._variant === 'SimplePattern' &&
            p.value._variant === 'lit')
        {
            const s = M.namelike(p.value.value);
            if (s !== void 0) return M.NamedAlternative({ variantLabel: s, pattern: p });
        }
        throw new SchemaSyntaxError(preserves`Name missing for alternative: ${input}`, pos);
    }

    // Names one intersection branch; binding names only apply to simple patterns.
    function patternName(input: Array<Input>): M.NamedPattern {
        const n = findName(input) || findName(input[0]);
        const p = parsePattern(name, input);
        if (n !== false) {
            if (p._variant !== 'SimplePattern') namedMustBeSimple(position(input[0]));
            return M.NamedPattern.named(M.Binding({ name: n, pattern: p.value }));
        }
        return M.NamedPattern.anonymous(p);
    }

    const andPieces = splitBy(body, M.ANDSYM);
    const orPieces = splitBy(body, M.ORSYM);

    // An empty body (e.g. `x = .`) splits into zero pieces on both axes.
    if (andPieces.length === 0 || orPieces.length === 0) {
        throw new SchemaSyntaxError(preserves`Invalid Schema clause: ${body}`, pos);
    }

    // `&` and `/` may not be mixed at the same level.
    if (andPieces.length > 1 && orPieces.length > 1) {
        throw new SchemaSyntaxError(preserves`Mixed "or" and "and" clause: ${body}`, pos);
    }

    if (andPieces.length > 1) {
        return M.Definition.and({
            pattern0: patternName(andPieces[0]),
            pattern1: patternName(andPieces[1]),
            patternN: andPieces.slice(2).map(patternName),
        });
    }

    if (orPieces.length > 1) {
        return M.Definition.or({
            pattern0: alternativeName(orPieces[0]),
            pattern1: alternativeName(orPieces[1]),
            patternN: orPieces.slice(2).map(alternativeName),
        });
    }

    // Neither `&` nor `/`: a single plain pattern.
    return M.Definition.Pattern(parsePattern(name, orPieces[0]));
}
|
||||
|
||||
function transferAnnotations(dest: Input, src: Input): Input {
|
||||
return annotate(dest, ... annotations(src));
|
||||
}
|
||||
|
||||
// Parses a single pattern from `body0` (which must contain exactly one item).
// `parseSimple` is written in continuation-passing style: `ks` receives a
// SimplePattern on success, `kf` is invoked when the item is compound and
// should be handled by `parseCompound` instead.
function parsePattern(name: symbol, body0: Array<Input>): Pattern {
    function parseSimple<A>(item0: Input, ks: (p: SimplePattern) => A, kf: () => A): A {
        const pos = position(item0);
        const item = peel(item0);
        function complain(): never { invalidPattern(stringify(name), item, pos); }
        if (typeof item === 'symbol') {
            // Bare symbols: builtin atom kinds, `=quoted` literals, or Refs.
            const str = item.description!;
            switch (str) {
                case 'any': return ks(M.SimplePattern.any());
                case 'bool': return ks(M.SimplePattern.atom(M.AtomKind.Boolean()));
                case 'float': return ks(M.SimplePattern.atom(M.AtomKind.Float()));
                case 'double': return ks(M.SimplePattern.atom(M.AtomKind.Double()));
                case 'int': return ks(M.SimplePattern.atom(M.AtomKind.SignedInteger()));
                case 'string': return ks(M.SimplePattern.atom(M.AtomKind.String()));
                case 'bytes': return ks(M.SimplePattern.atom(M.AtomKind.ByteString()));
                case 'symbol': return ks(M.SimplePattern.atom(M.AtomKind.Symbol()));
                default: {
                    if (str[0] === '=') {
                        // `=foo` is a literal-symbol pattern.
                        return ks(M.SimplePattern.lit(Symbol.for(str.slice(1))));
                    } else if (M.isValidQid(str)) {
                        return ks(M.SimplePattern.Ref(parseRef(str, pos)));
                    } else {
                        complain();
                    }
                }
            }
        } else if (Record.isRecord<Input, Tuple<Input>, M.InputEmbedded>(item)) {
            // A record whose label is itself a zero-ary record is special
            // syntax, e.g. `<<lit> x>`; anything else falls through to kf.
            const label = item.label;
            if (Record.isRecord<Input, [], M.InputEmbedded>(label)) {
                if (label.length !== 0) complain();
                switch (label.label) {
                    case M.$lit:
                        if (item.length !== 1) complain();
                        return ks(M.SimplePattern.lit(item[0]));
                    default:
                        return kf();
                }
            } else {
                return kf();
            }
        } else if (Array.isArray(item) && item.length === 2 && is(item[1], M.DOTDOTDOT)) {
            // `[p ...]` — sequence-of.
            return ks(M.SimplePattern.seqof(walkSimple(item[0])));
        } else if (Set.isSet<M.Input>(item)) {
            // `#{p}` — set-of; exactly one element pattern allowed.
            if (item.size !== 1) complain();
            const [vp] = item.entries();
            return ks(M.SimplePattern.setof(walkSimple(vp)));
        } else if (Dictionary.isDictionary<M.InputEmbedded, Input>(item)
                   && item.size === 2
                   && item.has(M.DOTDOTDOT))
        {
            // `{kp: vp ...: ...}` — dictionary-of.
            const v = item.clone();
            v.delete(M.DOTDOTDOT);
            const [[kp, vp]] = v.entries();
            return ks(M.SimplePattern.dictof({ key: walkSimple(kp), value: walkSimple(vp) }));
        } else if (isCompound(item)) {
            return kf();
        } else if (isEmbedded(item)) {
            // `#!p` — embedded pattern.
            return ks(M.SimplePattern.embedded(walkSimple(item.embeddedValue.generic)));
        } else {
            // Any other atom is a literal pattern.
            return ks(M.SimplePattern.lit(strip(item)));
        }
    }

    // Handles compound shapes: records, tuples, tuple-prefixes, dictionaries.
    function parseCompound(item0: Input): CompoundPattern {
        const pos = position(item0);
        const item = peel(item0);
        function complain(): never { invalidPattern(stringify(name), item, pos); }

        if (Record.isRecord<Input, Tuple<Input>, M.InputEmbedded>(item)) {
            const label = item.label;
            if (Record.isRecord<Input, [], M.InputEmbedded>(label)) {
                if (label.length !== 0) complain();
                switch (label.label) {
                    case M.$rec:
                        // `<<rec> labelPat fieldsPat>` — general record pattern.
                        if (item.length !== 2) complain();
                        return M.CompoundPattern.rec({
                            label: maybeNamed(item[0]),
                            fields: maybeNamed(item[1])
                        });
                    default:
                        complain();
                }
            } else {
                // `<label f1 f2 ...>` — literal label, tuple of fields.
                return M.CompoundPattern.rec({
                    label: M.NamedPattern.anonymous(M.Pattern.SimplePattern(M.SimplePattern.lit(label))),
                    fields: M.NamedPattern.anonymous(parsePattern(name, [transferAnnotations([... item], item0)])),
                });
            }
        } else if (Array.isArray(item) && item.length > 2 && is(item[item.length - 1], M.DOTDOTDOT)) {
            // `[f1 f2 rest ...]` — fixed prefix plus repeated tail.
            const variableTemplateInput = item[item.length - 2];
            const variablePart =
                transferAnnotations([variableTemplateInput, M.DOTDOTDOT], variableTemplateInput);
            return M.CompoundPattern.tuplePrefix({
                fixed: item.slice(0, item.length - 2).map(maybeNamed),
                variable: maybeNamedSimple(variablePart),
            });
        } else if (Array.isArray(item)) {
            return M.CompoundPattern.tuple(item.map(maybeNamed));
        } else if (Dictionary.isDictionary<M.InputEmbedded, Input>(item) && !item.has(M.DOTDOTDOT)) {
            // `{k1: p1, ...}` — fixed-key dictionary; each value may borrow
            // its key as an implicit binding name.
            return M.CompoundPattern.dict(
                M.DictionaryEntries(item.mapEntries<M.NamedSimplePattern, Input, M.InputEmbedded>(
                    ([k, vp]) => [
                        strip(k),
                        _maybeNamed(
                            M.NamedSimplePattern.named,
                            M.NamedSimplePattern.anonymous,
                            walkSimple,
                            strip(k))(vp)
                    ])));
        } else {
            complain();
        }
    }

    const walk = (b: Input): Pattern => parsePattern(name, [b]);
    const walkSimple = (b: Input): SimplePattern => parseSimple(b, p => p, () => {
        throw new SchemaSyntaxError(`Compound patterns not accepted here`, position(b));
    });

    // Builds a wrapper that attaches an `@name` binding (explicit annotation,
    // or the optional literalName fallback, e.g. a dictionary key) to the
    // result of `recur`; bindings require the pattern to be simple.
    function _maybeNamed<R,P>(
        named: (p: M.Binding) => R,
        anonymous: (p: P) => R,
        recur: (b: Input) => P,
        literalName?: Input): (b: Input) => R
    {
        return (b: Input) => {
            let name = findName(b);
            if (name === false) {
                if (literalName !== void 0 &&
                    typeof literalName === 'symbol' &&
                    M.isValidToken(literalName.description!))
                {
                    name = literalName;
                }
            }
            if (name === false) {
                return anonymous(recur(b));
            }
            return named(M.Binding({ name, pattern: parseSimple(b, p => p, () =>
                namedMustBeSimple(position(b))) }));
        };
    }
    const maybeNamed = _maybeNamed(M.NamedPattern.named, M.NamedPattern.anonymous, walk);
    const maybeNamedSimple =
        _maybeNamed(M.NamedSimplePattern.named, M.NamedSimplePattern.anonymous, walkSimple);

    const body = peel(body0) as Array<Input>;
    if (body.length !== 1) {
        invalidPattern(stringify(name), body, body.length > 0 ? position(body[0]) : position(body));
    }
    return parseSimple(body[0],
                       M.Pattern.SimplePattern,
                       () => M.Pattern.CompoundPattern(parseCompound(body[0])));
}
|
||||
|
||||
// Extracts a `@name` binding from an annotated input, if present.
// Scans annotations in order; the FIRST symbol-valued annotation decides the
// outcome: `M.isValidToken(...) && a` yields the symbol when its text is a
// valid token, and `false` otherwise.
// NOTE(review): when the first symbol annotation is NOT a valid token, later
// annotations are never examined — confirm that skip is intended.
function findName(x: Input): symbol | false {
    if (!Annotated.isAnnotated<never>(x)) return false;
    for (const a0 of x.annotations) {
        const a = peel(a0);
        if (typeof a === 'symbol') return M.isValidToken(a.description!) && a;
    }
    return false;
}
|
||||
|
||||
function parseRef(s: string, pos: Position | null): M.Ref {
|
||||
const pieces = s.split('.');
|
||||
return recordPosition(M.Ref({
|
||||
module: M.ModulePath(pieces.slice(0, pieces.length - 1).map(Symbol.for)),
|
||||
name: Symbol.for(pieces[pieces.length - 1])
|
||||
}), pos);
|
||||
}
|
|
@ -1,137 +0,0 @@
|
|||
import { Reader } from '@preserves/core';
|
||||
import { Meta, readSchema } from '../src/index';
|
||||
import './test-utils';
|
||||
|
||||
// Jest suite for the schema invertibility checker: a schema is accepted only
// when every parsed value can be reserialized, which requires all
// non-literal positions to carry bindings and all binding names to be
// unambiguous.
describe('checker', () => {
    // Basic accept/reject cases for bindings on record fields.
    describe('simplest invertibility tests', () => {
        it('passes simple invertibility test', () => {
            expect(readSchema('version 1 . A = <a @x string @y symbol> .')).not.toBeNull();
        });
        it('passes invertibility check for literal field', () => {
            expect(readSchema('version 1 . A = <a "string" @y symbol> .')).not.toBeNull();
        });
        it('detects non-invertibility for string field', () => {
            expect(() => readSchema('version 1 . A = <a string @y symbol> .')).toThrow(/item 0 of fields of A/);
        });
        it('detects non-invertibility for symbol field', () => {
            expect(() => readSchema('version 1 . A = <a @x string symbol> .')).toThrow(/item 1 of fields of A/);
        });
        it('is OK with no names in simple seqof', () => {
            expect(readSchema('version 1 . A = [string ...].')).not.toBeNull();
        });
    });

    // Records with a named `...` tail; checks the parsed meta-representation.
    describe('extensible record', () => {
        it('is happy with extensible record', () => {
            expect(Meta.fromSchema(readSchema(
                'version 1 . ExtensibleRecord = <foo @a string @b string @extra any ...>.')))
                .is(new Reader(
                    `<schema {
                        version: 1,
                        embeddedType: #f,
                        definitions: {
                          ExtensibleRecord:
                          <rec <lit foo>
                               <tuple* [<named a <atom String>>, <named b <atom String>>]
                                       <named extra <seqof any>>>>}}>`).next());
        });
        it('non-invertibility tail', () => {
            expect(() => readSchema(
                'version 1 . ExtensibleRecord = <foo @a string @b string any ...>.'))
                .toThrow(/tail of fields of ExtensibleRecord/);
        });
    });

    // Duplicate-binding detection in every compound position.
    describe('duplicate bindings', () => {
        it('complains about duplicates in tuples', () => {
            expect(() => readSchema('version 1 . A = [@a string @a string].'))
                .toThrow(/duplicate binding named "a" in item 1 of A/);
        });
        it('complains about duplicates in dicts', () => {
            expect(() => readSchema('version 1 . A = { x: @a string , y: @a string }.'))
                .toThrow(/duplicate binding named "a" in entry y in dictionary in A/);
        });
        it('complains about duplicates in tuple*s', () => {
            expect(() => readSchema('version 1 . A = [@a string @b string @a int @rest any ...].'))
                .toThrow(/duplicate binding named "a" in item 2 of A/);
        });
        it('complains about duplicates in tuple* tails', () => {
            expect(() => readSchema('version 1 . A = [@a string @b string @c int @a any ...].'))
                .toThrow(/duplicate binding named "a" in tail of A/);
        });
        describe('in records', () => {
            it('complains about duplicates in recs (1)', () => {
                expect(() => readSchema('version 1 . A = <a @a string @a int>.'))
                    .toThrow(/duplicate binding named "a" in item 1 of fields of A/);
            });
            it('complains about duplicates in recs (2)', () => {
                expect(() => readSchema('version 1 . A = <a @a string <x @y int @a int>>.'))
                    .toThrow(/duplicate binding named "a" in item 1 of fields of item 1 of fields of A/);
            });
            it('complains about duplicates in recs (3)', () => {
                expect(() => readSchema('version 1 . A = <a @a string <<rec> =x [@y int @a int]>>.'))
                    .toThrow(/duplicate binding named "a" in item 1 of fields of item 1 of fields of A/);
            });
            it('complains about duplicates in recs (4)', () => {
                expect(() => readSchema('version 1 . A = <a @a string <<rec> @a =x [@y int @z int]>>.'))
                    .toThrow(/duplicate binding named "a" in label of item 1 of fields of A/);
            });
            it('complains about duplicates in recs (5)', () => {
                expect(() => readSchema('version 1 . A = <a @a string <<rec> @a any [@y int @z int]>>.'))
                    .toThrow(/duplicate binding named "a" in label of item 1 of fields of A/);
            });
        });
        // Union branches each have their own binding scope, but labels must
        // be distinct across branches.
        describe('in unions', () => {
            it('is OK with non-duplicate but duplicate-seeming bindings across branches', () => {
                expect(readSchema('version 1 . A = <a @a string> / <b @a string>.')).not.toBeNull();
            });
            it('complains about duplicates within branches', () => {
                expect(() => readSchema('version 1 . A = <a @a string @a int> / <b @a string>.'))
                    .toThrow(/in item 1 of fields of variant a of A/);
            });
            it('complains about duplicate branch names', () => {
                expect(() => readSchema('version 1 . A = @x <a @a string> / @x <b @a string>.'))
                    .toThrow(/duplicate variant label in variant x of A/);
            });
        });
        // Intersection branches share a binding scope.
        describe('in intersections', () => {
            it('is OK with non-duplicate bindings across branches', () => {
                expect(readSchema('version 1 . A = <a @a string> & <a @b string>.')).not.toBeNull();
            });
            it('complains about duplicates within branches', () => {
                expect(() => readSchema(
                    `version 1 . A = <a @a string @a int> & <a @b string @c int>.`))
                    .toThrow(/in item 1 of fields of A/);
            });
            it('complains about duplicates across branches', () => {
                expect(() => readSchema(
                    `version 1 . A = <a @a string @b int> & <a @a string @c int>.`))
                    .toThrow(/in item 0 of fields of A/);
            });
            it('complains about duplicates within named branches', () => {
                expect(() => readSchema(
                    `version 1 .
                     AAA = <a @a string @a int>.
                     ABC = <a @b string @c int>.
                     A = @x AAA & @y ABC.`))
                    .toThrow(/in item 1 of fields of AAA/);
            });
            it('is OK with seeming- but non-duplicates across named branches', () => {
                expect(readSchema(
                    `version 1 .
                     AAA = <a @a string @b int>.
                     ABC = <a @a string @c int>.
                     A = @x AAA & @y ABC.`))
                    .not.toBeNull();
            });
            it('complains about duplicate branch names', () => {
                expect(() => readSchema(
                    `version 1 .
                     AAB = <a @a string @b int>.
                     ACD = <a @c string @d int>.
                     A = @x AAB & @x ACD.`))
                    .toThrow(/in A/);
            });
        });
    });
});
|
|
@ -1,26 +0,0 @@
|
|||
import { readSchema, Meta } from '../src/index';
|
||||
|
||||
// Jest suite for readSchema's handling of version clauses and embedded
// patterns.
describe('reader schema', () => {
    it('complains about bad version', () => {
        expect(() => readSchema('version 999 .')).toThrow(/Invalid Version/);
    });
    it('complains about missing version', () => {
        expect(() => readSchema('')).toThrow(/missing version/);
    });
    it('is OK with an empty schema correctly versioned', () => {
        const s = readSchema('version 1 .');
        // NOTE(review): the test expects `version` to be null even though the
        // source declares `version 1` — presumably this matches parseSchema's
        // use of `M.Version()` when building the result; confirm intended.
        expect(s.version).toBeNull();
        expect(s.definitions.size).toBe(0);
        expect(s.embeddedType._variant).toBe('false');
    });
    // `#!0` is an embedded pattern wrapping the literal 0; drill into the
    // parsed Definition variant by variant.
    it('understands patterns under embed', () => {
        const s = readSchema('version 1 . X = #!0 .');
        const def: Meta.Definition = s.definitions.get(Symbol.for('X'))!;
        if (def._variant !== 'Pattern') fail('bad definition 1');
        if (def.value._variant !== 'SimplePattern') fail ('bad definition 2');
        if (def.value.value._variant !== 'embedded') fail('bad definition 3');
        const i = def.value.value.interface;
        if (i._variant !== 'lit') fail('Non-tuple embedded pattern');
        expect(i.value).toBe(0);
    });
});
|
|
@ -1,35 +0,0 @@
|
|||
import { Value, is, preserves } from '@preserves/core';
|
||||
|
||||
// Registers this file's custom jest matchers on the global Matchers type so
// `expect(x).is(...)` and `expect(f).toThrowFilter(...)` type-check.
declare global {
    namespace jest {
        interface Matchers<R> {
            // Asserts Preserves-equality (structural `is`) with `expected`.
            is<T>(expected: Value<T>): R;
            // Asserts that the thunk throws an error accepted by `f`.
            toThrowFilter(f: (e: Error) => boolean): R;
        }
    }
}
|
||||
|
||||
// Implementations of the custom matchers declared above.
expect.extend({
    // Preserves structural equality; messages render values as Preserves text.
    is(actual, expected) {
        return is(actual, expected)
            ? { message: () => preserves`expected ${actual} not to be Preserves.is to ${expected}`,
                pass: true }
            : { message: () => preserves`expected ${actual} to be Preserves.is to ${expected}`,
                pass: false };
    },

    // Runs the thunk; passes only when it throws AND the filter accepts the
    // thrown error.
    toThrowFilter(thunk, f) {
        try {
            thunk();
            return { message: () => preserves`expected an exception`, pass: false };
        } catch (e) {
            if (f(e)) {
                return { message: () => preserves`expected an exception not matching the filter`,
                         pass: true };
            } else {
                return { message: () => preserves`expected an exception matching the filter: ${e.constructor.name}`,
                         pass: false };
            }
        }
    }
});
|
|
@ -1,16 +0,0 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2017",
|
||||
"lib": ["es2019", "DOM"],
|
||||
"declaration": true,
|
||||
"baseUrl": "./src",
|
||||
"rootDir": "./src",
|
||||
"outDir": "./lib",
|
||||
"declarationDir": "./lib",
|
||||
"esModuleInterop": true,
|
||||
"moduleResolution": "node",
|
||||
"sourceMap": true,
|
||||
"strict": true
|
||||
},
|
||||
"include": ["src/**/*"]
|
||||
}
|
|
@ -1,6 +1,10 @@
|
|||
import pkg from './package.json';
|
||||
import { terser } from 'rollup-plugin-terser';
|
||||
|
||||
const distfile = (insertion) => `dist/preserves${insertion}.js`;
|
||||
function distfile(insertion) {
|
||||
const f = `${pkg.name}${insertion}.js`;
|
||||
return `dist/${f}`;
|
||||
}
|
||||
|
||||
function umd(insertion, extra) {
|
||||
return {
|
||||
|
@ -19,7 +23,7 @@ function es6(insertion, extra) {
|
|||
};
|
||||
}
|
||||
|
||||
export default [{
|
||||
export default {
|
||||
input: 'lib/index.js',
|
||||
output: [
|
||||
umd(''),
|
||||
|
@ -27,4 +31,4 @@ export default [{
|
|||
es6(''),
|
||||
es6('.min', { plugins: [terser()] }),
|
||||
],
|
||||
}];
|
||||
}
|
|
@ -0,0 +1,92 @@
|
|||
import { Encoder } from "./codec";
|
||||
import { Tag } from "./constants";
|
||||
import { AsPreserve, PreserveOn } from "./symbols";
|
||||
import { DefaultPointer, is, Value } from "./values";
|
||||
import { Record } from './record';
|
||||
import { Dictionary, Set } from './dictionary';
|
||||
|
||||
// Brand symbol (via Symbol.for, so shared across package instances) used to
// recognize Annotated objects without relying on instanceof.
export const IsPreservesAnnotated = Symbol.for('IsPreservesAnnotated');
|
||||
|
||||
// A Preserves value (`item`) carried together with a mutable list of
// annotations. The wrapper participates in the encoding protocol so that
// annotations are emitted (or suppressed) per the encoder's settings.
export class Annotated<T extends object = DefaultPointer> {
    readonly annotations: Array<Value<T>>;
    readonly item: Value<T>;

    constructor(item: Value<T>) {
        this.annotations = [];
        this.item = item;
    }

    [AsPreserve](): Value<T> {
        return this;
    }

    // Emits each annotation (Tag.Annotation followed by the annotation value)
    // when the encoder wants annotations, then the wrapped item.
    [PreserveOn](encoder: Encoder<T>) {
        if (encoder.includeAnnotations) {
            for (const a of this.annotations) {
                encoder.emitbyte(Tag.Annotation);
                encoder.push(a);
            }
        }
        encoder.push(this.item);
    }

    // Equality ignores annotations: two values compare by their items.
    equals(other: any): boolean {
        return is(this.item, Annotated.isAnnotated(other) ? other.item : other);
    }

    // hashCode(): number {
    //     return hash(this.item);
    // }

    toString(): string {
        return this.asPreservesText();
    }

    // Text syntax: each annotation prefixed with '@', then the item.
    asPreservesText(): string {
        const anns = this.annotations.map((a) => '@' + a.asPreservesText()).join(' ');
        return (anns ? anns + ' ' : anns) + this.item.asPreservesText();
    }

    get [IsPreservesAnnotated](): boolean {
        return true;
    }

    // Brand check; works across package instances because the brand symbol is
    // registered with Symbol.for.
    static isAnnotated<T extends object = DefaultPointer>(x: any): x is Annotated<T> {
        return !!x?.[IsPreservesAnnotated];
    }
}
|
||||
|
||||
// Removes exactly one layer of annotation from v (see strip).
export function peel<T extends object = DefaultPointer>(v: Value<T>): Value<T> {
    return strip(v, 1);
}
|
||||
|
||||
// Removes up to `depth` layers of annotation from v, recursing structurally
// into records, arrays, sets and dictionaries (each level of structure
// consumes one unit of depth). The default strips all annotations.
export function strip<T extends object = DefaultPointer>(v: Value<T>, depth: number = Infinity): Value<T> {
    function step(v: Value<T>, depth: number): Value<T> {
        // Stop when the depth budget is spent or the value is unannotated.
        if (depth === 0) return v;
        if (!Annotated.isAnnotated<T>(v)) return v;

        const nextDepth = depth - 1;
        function walk(v: Value<T>): Value<T> { return step(v, nextDepth); }

        if (Record.isRecord<T>(v.item)) {
            // Note: the label is stripped at the CURRENT depth, the fields at
            // nextDepth (via walk).
            return Record(step(v.item.label, depth), v.item.map(walk));
        } else if (Array.isArray(v.item)) {
            return (v.item as Array<Value<T>>).map(walk);
        } else if (Set.isSet<T>(v.item)) {
            return v.item.map(walk);
        } else if (Dictionary.isDictionary<Value<T>, T>(v.item)) {
            return v.item.mapEntries((e) => [walk(e[0]), walk(e[1])]);
        } else if (Annotated.isAnnotated(v.item)) {
            // Directly-nested Annotated wrappers are not valid structure.
            throw new Error("Improper annotation structure");
        } else {
            return v.item;
        }
    }
    return step(v, depth);
}
|
||||
|
||||
export function annotate<T extends object = DefaultPointer>(v0: Value<T>, ...anns: Value<T>[]): Annotated<T> {
|
||||
const v = Annotated.isAnnotated<T>(v0) ? v0 : new Annotated(v0);
|
||||
anns.forEach((a) => v.annotations.push(a));
|
||||
return v;
|
||||
}
|
|
@ -1,8 +1,7 @@
|
|||
import { Tag } from './constants';
|
||||
import { AsPreserve, PreserveOn } from './symbols';
|
||||
import { Encoder, Preservable } from './encoder';
|
||||
import { Value } from './values';
|
||||
import { GenericEmbedded } from './embedded';
|
||||
import { Encoder, Preservable } from './codec';
|
||||
import { DefaultPointer, Value } from './values';
|
||||
|
||||
const textEncoder = new TextEncoder();
|
||||
const textDecoder = new TextDecoder();
|
||||
|
@ -127,7 +126,7 @@ export class Bytes implements Preservable<never> {
|
|||
return this.asPreservesText();
|
||||
}
|
||||
|
||||
[AsPreserve]<T = GenericEmbedded>(): Value<T> {
|
||||
[AsPreserve]<T extends object = DefaultPointer>(): Value<T> {
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -161,9 +160,9 @@ export class Bytes implements Preservable<never> {
|
|||
}
|
||||
|
||||
[PreserveOn](encoder: Encoder<never>) {
|
||||
encoder.state.emitbyte(Tag.ByteString);
|
||||
encoder.state.varint(this.length);
|
||||
encoder.state.emitbytes(this._view);
|
||||
encoder.emitbyte(Tag.ByteString);
|
||||
encoder.varint(this.length);
|
||||
encoder.emitbytes(this._view);
|
||||
}
|
||||
|
||||
get [IsPreservesBytes](): boolean {
|
|
@ -0,0 +1,431 @@
|
|||
// Preserves Binary codec.
|
||||
|
||||
import {
|
||||
underlying,
|
||||
Annotated,
|
||||
Dictionary, Set, Bytes, Record, SingleFloat, DoubleFloat,
|
||||
BytesLike,
|
||||
Value,
|
||||
} from './values';
|
||||
import { Tag } from './constants';
|
||||
|
||||
import { PreserveOn } from './symbols';
|
||||
|
||||
// Discriminator for codec errors. The type names the variants; the symbol
// (registered with Symbol.for, so shared across package instances) keys the
// discriminator property on error objects.
export type ErrorType = 'DecodeError' | 'EncodeError' | 'ShortPacket';
export const ErrorType = Symbol.for('ErrorType');
||||
|
||||
// Anything the encoder can emit: a plain Value, a self-serializing object,
// an iterable of Values, or raw bytes.
export type Encodable<T extends object> =
    Value<T> | Preservable<T> | Iterable<Value<T>> | ArrayBufferView;

// Implemented by objects that know how to serialize themselves onto an
// Encoder.
export interface Preservable<T extends object> {
    [PreserveOn](encoder: Encoder<T>): void;
}
|
||||
|
||||
export function isPreservable<T extends object>(v: any): v is Preservable<T> {
|
||||
return typeof v === 'object' && v !== null && typeof v[PreserveOn] === 'function';
|
||||
}
|
||||
|
||||
// Base for codec errors, recognized via the ErrorType symbol property rather
// than instanceof (robust across multiple copies of this package).
export abstract class PreservesCodecError {
    abstract get [ErrorType](): ErrorType;

    // True when e carries the given ErrorType discriminator.
    static isCodecError(e: any, t: ErrorType): e is PreservesCodecError {
        return (e?.[ErrorType] === t);
    }
}
|
||||
|
||||
// Raised on malformed input during decoding.
export class DecodeError extends Error {
    get [ErrorType](): ErrorType { return 'DecodeError' }

    static isDecodeError(e: any): e is DecodeError {
        return PreservesCodecError.isCodecError(e, 'DecodeError');
    }
}
|
||||
|
||||
// Raised when a value cannot be encoded; carries the offending value.
export class EncodeError extends Error {
    get [ErrorType](): ErrorType { return 'EncodeError' }

    static isEncodeError(e: any): e is EncodeError {
        return PreservesCodecError.isCodecError(e, 'EncodeError');
    }

    // The value that could not be encoded.
    readonly irritant: any;

    constructor(message: string, irritant: any) {
        super(message);
        this.irritant = irritant;
    }
}
|
||||
|
||||
// A DecodeError variant meaning the input ended mid-value; callers may
// buffer more bytes and retry.
export class ShortPacket extends DecodeError {
    get [ErrorType](): ErrorType { return 'ShortPacket' }

    static isShortPacket(e: any): e is ShortPacket {
        return PreservesCodecError.isCodecError(e, 'ShortPacket');
    }
}
|
||||
|
||||
// Options controlling decoding.
export interface DecoderOptions<T extends object> {
    // When true, decoded values retain their annotations (default false).
    includeAnnotations?: boolean;
    // Converts a decoded pointer description into a domain pointer value.
    decodePointer?: (v: Value<T>) => T;
}
|
||||
|
||||
/**
 * Incremental decoder for the Preserves binary encoding.
 *
 * Reads Values from `packet` starting at `index`. All the low-level readers
 * throw ShortPacket when the buffer ends mid-value; try_next() exploits this
 * to support streaming: feed more bytes with write() and retry.
 */
export class Decoder<T extends object> {
    packet: Uint8Array;          // bytes being decoded
    index: number;               // read cursor into `packet`
    options: DecoderOptions<T>;

    constructor(packet: BytesLike = new Uint8Array(0), options: DecoderOptions<T> = {}) {
        this.packet = underlying(packet);
        this.index = 0;
        this.options = options;
    }

    // Annotations are discarded by default.
    get includeAnnotations(): boolean {
        return this.options.includeAnnotations ?? false;
    }

    // Appends `data`, dropping already-consumed bytes and resetting the cursor.
    write(data: BytesLike) {
        this.packet = Bytes.concat([this.packet.slice(this.index), data])._view;
        this.index = 0;
    }

    // Consumes and returns one byte; ShortPacket if none remain.
    nextbyte(): number {
        if (this.index >= this.packet.length) throw new ShortPacket("Short packet");
        // ^ NOTE: greater-than-or-equal-to, not greater-than.
        return this.packet[this.index++];
    }

    // Consumes `n` bytes, returning a DataView over the same backing buffer
    // (no copy); ShortPacket if fewer than `n` remain.
    nextbytes(n: number): DataView {
        const start = this.index;
        this.index += n;
        if (this.index > this.packet.length) throw new ShortPacket("Short packet");
        // ^ NOTE: greater-than, not greater-than-or-equal-to.
        return new DataView(this.packet.buffer, this.packet.byteOffset + start, n);
    }

    // Reads a base-128 varint, least-significant group first; the recursion
    // accumulates the more-significant groups.
    varint(): number {
        // TODO: Bignums :-/
        const v = this.nextbyte();
        if (v < 128) return v;
        return (this.varint() << 7) + (v - 128);
    }

    // True (consuming the byte) if the next byte is the compound End marker;
    // otherwise false, leaving the cursor untouched.
    peekend(): boolean {
        const matched = this.nextbyte() === Tag.End;
        if (!matched) this.index--;
        return matched;
    }

    // Decodes values until the enclosing compound's End marker is consumed.
    nextvalues(): Value<T>[] {
        const result = [];
        while (!this.peekend()) result.push(this.next());
        return result;
    }

    // Reads an `n`-byte big-endian two's-complement integer; the first
    // byte's high bit carries the sign.
    nextint(n: number): number {
        // TODO: Bignums :-/
        if (n === 0) return 0;
        let acc = this.nextbyte();
        if (acc & 0x80) acc -= 256;  // sign-extend the leading byte
        for (let i = 1; i < n; i++) acc = (acc << 8) | this.nextbyte();
        return acc;
    }

    // Wraps `v` in an Annotated carrier when annotations are being kept.
    wrap(v: Value<T>): Value<T> {
        return this.includeAnnotations ? new Annotated(v) : v;
    }

    // Builds a Dictionary from a flat [key, value, key, value, ...] array.
    static dictionaryFromArray<T extends object>(vs: Value<T>[]): Dictionary<Value<T>, T> {
        const d = new Dictionary<Value<T>, T>();
        if (vs.length % 2) throw new DecodeError("Missing dictionary value");
        for (let i = 0; i < vs.length; i += 2) {
            d.set(vs[i], vs[i+1]);
        }
        return d;
    }

    // Prepends annotation `a` to `v`; since nested Annotation tags decode
    // innermost-last, unshift restores the original outermost-first order.
    // No-op when annotations are being discarded.
    unshiftAnnotation(a: Value<T>, v: Annotated<T>) {
        if (this.includeAnnotations) {
            v.annotations.unshift(a);
        }
        return v;
    }

    // Decodes and returns the next complete Value. Throws DecodeError on
    // malformed input, ShortPacket when the buffer ends mid-value.
    next(): Value<T> {
        const tag = this.nextbyte();
        switch (tag) {
            case Tag.False: return this.wrap(false);
            case Tag.True: return this.wrap(true);
            // Floats are fixed-size big-endian IEEE 754 (4 or 8 bytes).
            case Tag.Float: return this.wrap(new SingleFloat(this.nextbytes(4).getFloat32(0, false)));
            case Tag.Double: return this.wrap(new DoubleFloat(this.nextbytes(8).getFloat64(0, false)));
            case Tag.End: throw new DecodeError("Unexpected Compound end marker");
            case Tag.Annotation: {
                const a = this.next();                  // the annotation itself
                const v = this.next() as Annotated<T>;  // the value it annotates
                return this.unshiftAnnotation(a, v);
            }
            case Tag.Pointer: {
                const d = this.options.decodePointer;
                if (d === void 0) {
                    throw new DecodeError("No decodePointer function supplied");
                }
                return this.wrap(d(this.next()));
            }
            // Variable-length atoms carry a varint byte count.
            case Tag.SignedInteger: return this.wrap(this.nextint(this.varint()));
            case Tag.String: return this.wrap(Bytes.from(this.nextbytes(this.varint())).fromUtf8());
            case Tag.ByteString: return this.wrap(Bytes.from(this.nextbytes(this.varint())));
            case Tag.Symbol: return this.wrap(Symbol.for(Bytes.from(this.nextbytes(this.varint())).fromUtf8()));
            case Tag.Record: {
                const vs = this.nextvalues();  // label followed by fields
                if (vs.length === 0) throw new DecodeError("Too few elements in encoded record");
                return this.wrap(Record(vs[0], vs.slice(1)));
            }
            case Tag.Sequence: return this.wrap(this.nextvalues());
            case Tag.Set: return this.wrap(new Set(this.nextvalues()));
            case Tag.Dictionary: return this.wrap(Decoder.dictionaryFromArray(this.nextvalues()));
            default: {
                // Small integers -3..12 are packed directly into the tag byte.
                if (tag >= Tag.SmallInteger_lo && tag <= Tag.SmallInteger_lo + 15) {
                    const v = tag - Tag.SmallInteger_lo;
                    return this.wrap(v > 12 ? v - 16 : v);
                }
                // Medium integers: the tag encodes the byte count (1..16).
                if (tag >= Tag.MediumInteger_lo && tag <= Tag.MediumInteger_lo + 15) {
                    const n = tag - Tag.MediumInteger_lo;
                    return this.wrap(this.nextint(n + 1));
                }
                throw new DecodeError("Unsupported Preserves tag: " + tag);
            }
        }
    }

    // Like next(), but when the buffer holds only part of a value, rolls the
    // cursor back and returns undefined so the caller can write() more bytes
    // and retry. All other errors propagate.
    try_next() {
        const start = this.index;
        try {
            return this.next();
        } catch (e) {
            if (ShortPacket.isShortPacket(e)) {
                this.index = start;
                return void 0;
            }
            throw e;
        }
    }
}
|
||||
|
||||
export function decode<T extends object>(bs: BytesLike, options?: DecoderOptions<T>) {
|
||||
return new Decoder(bs, options).next();
|
||||
}
|
||||
|
||||
export function decodeWithAnnotations<T extends object>(bs: BytesLike, options: DecoderOptions<T> = {}): Annotated<T> {
|
||||
return decode(bs, { ... options, includeAnnotations: true }) as Annotated<T>;
|
||||
}
|
||||
|
||||
export interface EncoderOptions<T extends object> {
    // Canonical output; defaults to true (see Encoder.canonical).
    canonical?: boolean;
    // Whether annotations are emitted; defaults to the opposite of
    // `canonical` (see Encoder.includeAnnotations).
    includeAnnotations?: boolean;
    // Maps a domain value T to its encodable Pointer payload; when absent,
    // Encoder.push falls back to pointerId.
    encodePointer?: (v: T) => Value<T>;
}
|
||||
|
||||
function chunkStr(bs: Uint8Array): string {
|
||||
return String.fromCharCode.apply(null, bs as any as number[]);
|
||||
}
|
||||
|
||||
function isIterable<T>(v: any): v is Iterable<T> {
|
||||
return typeof v === 'object' && v !== null && typeof v[Symbol.iterator] === 'function';
|
||||
}
|
||||
|
||||
/**
 * Incremental encoder producing the Preserves binary encoding.
 *
 * Output accumulates in a growable working buffer (`view`/`index`); filled
 * buffers are rotated onto `chunks`. Use push() to serialise values and
 * contents() / contentsString() to harvest the result.
 */
export class Encoder<T extends object> {
    chunks: Array<Uint8Array>;   // completed output segments
    view: DataView;              // current working buffer
    index: number;               // write cursor into `view`
    options: EncoderOptions<T>;

    constructor(options: EncoderOptions<T> = {}) {
        this.chunks = [];
        this.view = new DataView(new ArrayBuffer(256));
        this.index = 0;
        this.options = options;
    }

    // Canonical form is the default.
    get canonical(): boolean {
        return this.options.canonical ?? true;
    }

    // Canonical output omits annotations unless explicitly requested.
    get includeAnnotations(): boolean {
        return this.options.includeAnnotations ?? !this.canonical;
    }

    // Harvests everything encoded so far as Bytes, resetting the cursor.
    // NOTE(review): in the multi-chunk branch, `chunks` is not cleared after
    // harvesting — confirm that an encoder which has rotated is not reused.
    contents(): Bytes {
        if (this.chunks.length === 0) {
            const resultLength = this.index;
            this.index = 0;
            return new Bytes(this.view.buffer.slice(0, resultLength));
        } else {
            this.rotatebuffer(4096);
            return Bytes.concat(this.chunks);
        }
    }

    /* Like contents(), but hands back a string containing binary data "encoded" via latin-1 */
    contentsString(): string {
        if (this.chunks.length === 0) {
            const s = chunkStr(new Uint8Array(this.view.buffer, 0, this.index));
            this.index = 0;
            return s;
        } else {
            this.rotatebuffer(4096);
            return this.chunks.map(chunkStr).join('');
        }
    }

    // Retires the working buffer onto `chunks` and starts a fresh one of `size` bytes.
    rotatebuffer(size: number) {
        this.chunks.push(new Uint8Array(this.view.buffer, 0, this.index));
        this.view = new DataView(new ArrayBuffer(size));
        this.index = 0;
    }

    // Ensures at least `amount` bytes of headroom in the working buffer.
    makeroom(amount: number) {
        if (this.index + amount > this.view.byteLength) {
            this.rotatebuffer(amount + 4096);
        }
    }

    emitbyte(b: number) {
        this.makeroom(1);
        this.view.setUint8(this.index++, b);
    }

    // Copies `bs` into the working buffer; makeroom guarantees it fits.
    emitbytes(bs: Uint8Array) {
        this.makeroom(bs.length);
        (new Uint8Array(this.view.buffer)).set(bs, this.index);
        this.index += bs.length;
    }

    // Writes a base-128 varint, least-significant group first; every group
    // except the last has its high bit set.
    varint(v: number) {
        while (v >= 128) {
            this.emitbyte((v % 128) + 128);
            v = Math.floor(v / 128);
        }
        this.emitbyte(v);
    }

    // Emits a signed integer in minimal big-endian two's complement, using
    // the compact MediumInteger tags for encodings of 1..16 bytes.
    // NOTE(review): v === 0 makes plain_bitcount NaN; push() routes 0 through
    // the SmallInteger path so this is not hit from push — confirm no other
    // callers pass 0.
    encodeint(v: number) {
        // TODO: Bignums :-/
        const plain_bitcount = Math.floor(Math.log2(v > 0 ? v : ~v)) + 1;
        const signed_bitcount = plain_bitcount + 1;
        const bytecount = (signed_bitcount + 7) >> 3;
        if (bytecount <= 16) {
            this.emitbyte(Tag.MediumInteger_lo + bytecount - 1);
        } else {
            this.emitbyte(Tag.SignedInteger);
            this.varint(bytecount);
        }
        // Recursion emits the most-significant byte first (big-endian).
        const enc = (n: number, x: number) => {
            if (n > 0) {
                enc(n - 1, x >> 8);
                this.emitbyte(x & 255);
            }
        };
        enc(bytecount, v);
    }

    // Emits tag, varint length, then the raw bytes (String/ByteString/Symbol).
    encodebytes(tag: Tag, bs: Uint8Array) {
        this.emitbyte(tag);
        this.varint(bs.length);
        this.emitbytes(bs);
    }

    // Emits tag, each item recursively, then the End marker (compounds).
    encodevalues(tag: Tag, items: Iterable<Value<T>>) {
        this.emitbyte(tag);
        for (let i of items) { this.push(i); }
        this.emitbyte(Tag.End);
    }

    // Like encodevalues, but the items are already-encoded byte runs.
    encoderawvalues(tag: Tag, items: BytesLike[]) {
        this.emitbyte(tag);
        items.forEach((i) => this.emitbytes(underlying(i)));
        this.emitbyte(Tag.End);
    }

    // Serialises one Encodable. Dispatch order matters: Preservable first,
    // then primitives, then binary, then iterables, with Pointer as fallback.
    push(v: Encodable<T>) {
        if (isPreservable<never>(v)) {
            v[PreserveOn](this as unknown as Encoder<never>);
        }
        // NOTE(review): this branch looks unreachable — isPreservable's runtime
        // check does not depend on the type argument, so the first branch
        // already matched. Confirm before removing.
        else if (isPreservable<T>(v)) {
            v[PreserveOn](this);
        }
        else if (typeof v === 'boolean') {
            this.emitbyte(v ? Tag.True : Tag.False);
        }
        else if (typeof v === 'number') {
            // -3..12 fit in a SmallInteger tag byte; anything else gets the
            // multi-byte integer encoding.
            if (v >= -3 && v <= 12) {
                this.emitbyte(Tag.SmallInteger_lo + ((v + 16) & 0xf));
            } else {
                this.encodeint(v);
            }
        }
        else if (typeof v === 'string') {
            this.encodebytes(Tag.String, new Bytes(v)._view);
        }
        else if (typeof v === 'symbol') {
            // Only registry symbols (Symbol.for) round-trip; others have no name.
            const key = Symbol.keyFor(v);
            if (key === void 0) throw new EncodeError("Cannot preserve non-global Symbol", v);
            this.encodebytes(Tag.Symbol, new Bytes(key)._view);
        }
        else if (ArrayBuffer.isView(v)) {
            if (v instanceof Uint8Array) {
                this.encodebytes(Tag.ByteString, v);
            } else {
                // Reinterpret any other typed-array view as raw bytes.
                const bs = new Uint8Array(v.buffer, v.byteOffset, v.byteLength);
                this.encodebytes(Tag.ByteString, bs);
            }
        }
        else if (Array.isArray(v)) {
            this.encodevalues(Tag.Sequence, v);
        }
        else if (isIterable<Value<T>>(v)) {
            this.encodevalues(Tag.Sequence, v as Iterable<Value<T>>);
        }
        else {
            // Any remaining object is treated as a domain pointer.
            const e = this.options.encodePointer ?? pointerId;
            this.emitbyte(Tag.Pointer);
            this.push(e(v));
        }
        return this; // for chaining
    }
}
|
||||
|
||||
export function encode<T extends object>(v: Encodable<T>, options?: EncoderOptions<T>): Bytes {
|
||||
return new Encoder(options).push(v).contents();
|
||||
}
|
||||
|
||||
let _nextId = 0;
|
||||
const _registry = new WeakMap<object, number>();
|
||||
export function pointerId(v: object): number {
|
||||
let id = _registry.get(v);
|
||||
if (id === void 0) {
|
||||
id = _nextId++;
|
||||
_registry.set(v, id);
|
||||
}
|
||||
return id;
|
||||
}
|
||||
|
||||
const _canonicalEncoder = new Encoder({ canonical: true });
|
||||
let _usingCanonicalEncoder = false;
|
||||
export function canonicalEncode(v: Encodable<any>, options?: EncoderOptions<any>): Bytes {
|
||||
if (options === void 0 && !_usingCanonicalEncoder) {
|
||||
_usingCanonicalEncoder = true;
|
||||
const bs = _canonicalEncoder.push(v).contents();
|
||||
_usingCanonicalEncoder = false;
|
||||
return bs;
|
||||
} else {
|
||||
return encode(v, { ... options, canonical: true });
|
||||
}
|
||||
}
|
||||
|
||||
export function canonicalString(v: Encodable<any>): string {
|
||||
return _canonicalEncoder.push(v).contentsString();
|
||||
}
|
||||
|
||||
export function encodeWithAnnotations<T extends object>(v: Encodable<T>, options: EncoderOptions<T> = {}): Bytes {
|
||||
return encode(v, { ... options, includeAnnotations: true });
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue