forked from syndicate-lang/preserves
Compare commits
452 Commits
Author | SHA1 | Date |
---|---|---|
Emery Hemingway | 2ff489d975 | |
Tony Garnock-Jones | 1668fdc6dd | |
Tony Garnock-Jones | 3a605e75d6 | |
Tony Garnock-Jones | b2c3032e7a | |
Tony Garnock-Jones | 668ac9f680 | |
Tony Garnock-Jones | ea83031a28 | |
Tony Garnock-Jones | 77c305a4cf | |
Tony Garnock-Jones | 602cfb8800 | |
Tony Garnock-Jones | 5260f85952 | |
Tony Garnock-Jones | 55b02b9cff | |
Tony Garnock-Jones | 386c07628c | |
Tony Garnock-Jones | 5fded03fa4 | |
Tony Garnock-Jones | 0507ab2f38 | |
Tony Garnock-Jones | b73e0c7025 | |
Tony Garnock-Jones | e2a4e3d6cb | |
Tony Garnock-Jones | e31cf739df | |
Tony Garnock-Jones | 486a631e73 | |
Tony Garnock-Jones | 29a882f953 | |
Tony Garnock-Jones | d1d52c2a30 | |
Tony Garnock-Jones | 62d9236045 | |
Tony Garnock-Jones | 1b4064b17c | |
Tony Garnock-Jones | a72810c416 | |
Tony Garnock-Jones | b1ed29657e | |
Tony Garnock-Jones | 9936ddb29d | |
Tony Garnock-Jones | 94e9fabc70 | |
Tony Garnock-Jones | f778325748 | |
Tony Garnock-Jones | b68b485af4 | |
Tony Garnock-Jones | 72a38cea7e | |
Tony Garnock-Jones | ccf01f5f24 | |
Tony Garnock-Jones | 6fdda6636b | |
Tony Garnock-Jones | 843c0c894f | |
Tony Garnock-Jones | e5a5130b56 | |
Tony Garnock-Jones | a91ee3977f | |
Tony Garnock-Jones | ac8567731d | |
Tony Garnock-Jones | 6869a89291 | |
Tony Garnock-Jones | e78196c942 | |
Tony Garnock-Jones | 15a27b4865 | |
Tony Garnock-Jones | df4059ee7a | |
Tony Garnock-Jones | 8d587c0aaa | |
Tony Garnock-Jones | d872f7cf8a | |
Tony Garnock-Jones | 7bf5403353 | |
Tony Garnock-Jones | 9f98e1ef3b | |
Tony Garnock-Jones | 06fc9aa017 | |
Tony Garnock-Jones | 997bea2836 | |
Tony Garnock-Jones | 28249b19f7 | |
Tony Garnock-Jones | e56b62cfbb | |
Tony Garnock-Jones | 985a0b6795 | |
Tony Garnock-Jones | 352d8ba1b3 | |
Tony Garnock-Jones | 8127033407 | |
Tony Garnock-Jones | 6348524542 | |
Tony Garnock-Jones | 87227b5623 | |
Tony Garnock-Jones | c4afc49646 | |
Tony Garnock-Jones | ef67347b8d | |
Tony Garnock-Jones | aabe7b2623 | |
Tony Garnock-Jones | e43e85ce8e | |
Tony Garnock-Jones | 5f2a3e3eb8 | |
Tony Garnock-Jones | eda9979041 | |
Tony Garnock-Jones | 4afc6d4c94 | |
Tony Garnock-Jones | d26e38ded0 | |
Tony Garnock-Jones | 66e7af491f | |
Tony Garnock-Jones | 18ac916899 | |
Tony Garnock-Jones | 00e31c0e29 | |
Tony Garnock-Jones | a9125874bf | |
Tony Garnock-Jones | 8550be0ba2 | |
Tony Garnock-Jones | 3cdf1f662e | |
Tony Garnock-Jones | 027966fb3b | |
Tony Garnock-Jones | f387f5e8c9 | |
Tony Garnock-Jones | 0aded61071 | |
Tony Garnock-Jones | dc451ea7b4 | |
Tony Garnock-Jones | 3e56cf3d7e | |
Tony Garnock-Jones | 94fe6ad946 | |
Tony Garnock-Jones | 446b2ee5f7 | |
Tony Garnock-Jones | 62cd9ac78f | |
Tony Garnock-Jones | 8b5aa372b5 | |
Tony Garnock-Jones | d408070fde | |
Tony Garnock-Jones | 0f30522f19 | |
Tony Garnock-Jones | e0bc1b31b8 | |
Tony Garnock-Jones | 6bb99b45c3 | |
Tony Garnock-Jones | 5669f2aff1 | |
Tony Garnock-Jones | 0c693d8ece | |
Tony Garnock-Jones | 82c66ec1c4 | |
Tony Garnock-Jones | a5065955ca | |
Tony Garnock-Jones | 8afc8f1eae | |
Tony Garnock-Jones | 123b6222ca | |
Tony Garnock-Jones | cf192b634c | |
Tony Garnock-Jones | fff34b8d45 | |
Tony Garnock-Jones | e2b27b619f | |
Tony Garnock-Jones | a2ca133983 | |
Tony Garnock-Jones | e056394ca6 | |
Tony Garnock-Jones | fc1d6afc28 | |
Tony Garnock-Jones | 59bcced776 | |
Tony Garnock-Jones | e45ff6b020 | |
Tony Garnock-Jones | abe60b3506 | |
Tony Garnock-Jones | 5c8bacd759 | |
Tony Garnock-Jones | 1b466aade7 | |
Tony Garnock-Jones | 23329cd8f3 | |
Tony Garnock-Jones | 06a08631aa | |
Tony Garnock-Jones | eafc22fb1c | |
Tony Garnock-Jones | ea75dc8f59 | |
Tony Garnock-Jones | 5161e54e0d | |
Tony Garnock-Jones | 7f8db08039 | |
Tony Garnock-Jones | cf94a95266 | |
Tony Garnock-Jones | 3128f6da82 | |
Tony Garnock-Jones | f1b4a4568b | |
Tony Garnock-Jones | d67bafcb30 | |
Tony Garnock-Jones | ebc609dfec | |
Tony Garnock-Jones | 4f60845dc0 | |
Tony Garnock-Jones | 37ca3fd493 | |
Tony Garnock-Jones | e6d7e9c1b5 | |
Tony Garnock-Jones | b11316e40b | |
Tony Garnock-Jones | c2167f1ee8 | |
Tony Garnock-Jones | 0821f6e3da | |
Tony Garnock-Jones | e02ee00894 | |
Tony Garnock-Jones | 32ebebec34 | |
Tony Garnock-Jones | a0bf6ebf41 | |
Tony Garnock-Jones | 02420543f1 | |
Tony Garnock-Jones | 7abd4a3d3a | |
Tony Garnock-Jones | 2ee1c48fcd | |
Tony Garnock-Jones | 965bda9f9e | |
Tony Garnock-Jones | 61af114d5f | |
Tony Garnock-Jones | 137cc63a97 | |
Tony Garnock-Jones | be10924118 | |
Tony Garnock-Jones | 96707352e6 | |
Tony Garnock-Jones | cfd9898b4d | |
Tony Garnock-Jones | 661d96780d | |
Tony Garnock-Jones | 87946abb63 | |
Tony Garnock-Jones | 96f5c9f434 | |
Tony Garnock-Jones | b24aca8f0f | |
Tony Garnock-Jones | 3078396487 | |
Tony Garnock-Jones | d28901446d | |
Tony Garnock-Jones | e913951b91 | |
Tony Garnock-Jones | e80d849f9a | |
Tony Garnock-Jones | dcdfdb8dd9 | |
Tony Garnock-Jones | 3176e5f8d0 | |
Tony Garnock-Jones | af1405e87a | |
Tony Garnock-Jones | 9d4e6998f2 | |
Tony Garnock-Jones | 9b100ab9aa | |
Tony Garnock-Jones | 5fa8c32ba0 | |
Tony Garnock-Jones | 61c6dfbc3e | |
Tony Garnock-Jones | 48412ae7ea | |
Tony Garnock-Jones | 9b88db6790 | |
Tony Garnock-Jones | cae254ef21 | |
Tony Garnock-Jones | 6d9ed94065 | |
Tony Garnock-Jones | 7712c6e0a9 | |
Tony Garnock-Jones | 423c9d0bba | |
Tony Garnock-Jones | 6ffc34065f | |
Tony Garnock-Jones | 932375fa49 | |
Tony Garnock-Jones | 7e3bf2ade5 | |
Tony Garnock-Jones | 790782fc87 | |
Tony Garnock-Jones | c527160e9d | |
Tony Garnock-Jones | 64593436a8 | |
Tony Garnock-Jones | 7c8a5c61ca | |
Tony Garnock-Jones | e9b5b3549c | |
Tony Garnock-Jones | e30ade6ed3 | |
Tony Garnock-Jones | 7e76503779 | |
Tony Garnock-Jones | da08189dd4 | |
Tony Garnock-Jones | 8f1a83e548 | |
Tony Garnock-Jones | 41fe3c3440 | |
Tony Garnock-Jones | 64ff818cd1 | |
Tony Garnock-Jones | 83d15a838e | |
Tony Garnock-Jones | 0fb1ef4efd | |
Tony Garnock-Jones | c04447d62a | |
Tony Garnock-Jones | c7b252ca9d | |
Tony Garnock-Jones | 6143ddc93d | |
Tony Garnock-Jones | c70035b044 | |
Tony Garnock-Jones | 297d1d39eb | |
Tony Garnock-Jones | 8b7baec26b | |
Tony Garnock-Jones | 23943f8b14 | |
Tony Garnock-Jones | 8cafcbcaf1 | |
Tony Garnock-Jones | 70990d2371 | |
Tony Garnock-Jones | 7c4cf38110 | |
Tony Garnock-Jones | 569563a564 | |
Tony Garnock-Jones | aea230b056 | |
Tony Garnock-Jones | 00759673ce | |
Tony Garnock-Jones | 5b9c4d29f6 | |
Tony Garnock-Jones | 17d8d076ec | |
Tony Garnock-Jones | e882d5a4df | |
Tony Garnock-Jones | fc8709706c | |
Tony Garnock-Jones | 3156180601 | |
Tony Garnock-Jones | f1d403a6a7 | |
Tony Garnock-Jones | d69787e5ee | |
Tony Garnock-Jones | c3bc678a46 | |
Tony Garnock-Jones | dd9e190bed | |
Tony Garnock-Jones | 01e8e2c279 | |
Tony Garnock-Jones | e4392ea2d5 | |
Tony Garnock-Jones | aa1c983acc | |
Tony Garnock-Jones | 5c2d12971d | |
Tony Garnock-Jones | 9c4be54be1 | |
Tony Garnock-Jones | 7546ba29ad | |
Tony Garnock-Jones | c7dbbdc178 | |
Tony Garnock-Jones | 4144a90b9d | |
Tony Garnock-Jones | 60d1be41a3 | |
Tony Garnock-Jones | fdb43f6292 | |
Tony Garnock-Jones | da513a249e | |
Tony Garnock-Jones | f808e37e89 | |
Tony Garnock-Jones | f12343e723 | |
Tony Garnock-Jones | 0d4d1e738c | |
Tony Garnock-Jones | feb6361029 | |
Tony Garnock-Jones | 27002dfe7f | |
Tony Garnock-Jones | e5b6c46169 | |
Tony Garnock-Jones | 6cecf64df5 | |
Tony Garnock-Jones | 7acf7c5b40 | |
Tony Garnock-Jones | 0bcb4e64ec | |
Tony Garnock-Jones | 30bcc1a50b | |
Tony Garnock-Jones | a4d61017d8 | |
Tony Garnock-Jones | ade9b0a0f1 | |
Tony Garnock-Jones | f93d329f48 | |
Tony Garnock-Jones | c05180c492 | |
Tony Garnock-Jones | f14b902f24 | |
Tony Garnock-Jones | 139f4ff08b | |
Tony Garnock-Jones | 12e38ddd8f | |
Tony Garnock-Jones | 2c5ee2066b | |
Tony Garnock-Jones | 53bd5a1a7e | |
Tony Garnock-Jones | b968f77ff6 | |
Tony Garnock-Jones | bd68786f1c | |
Tony Garnock-Jones | 64696ac184 | |
Tony Garnock-Jones | 7683a64a5b | |
Tony Garnock-Jones | 498c63ef67 | |
Tony Garnock-Jones | 9014a0ffb8 | |
Tony Garnock-Jones | 8d96743d53 | |
Tony Garnock-Jones | b23acdaf5a | |
Tony Garnock-Jones | a44884d9f5 | |
Tony Garnock-Jones | 947b816a57 | |
Tony Garnock-Jones | b69c3a0894 | |
Tony Garnock-Jones | d7bf235813 | |
Tony Garnock-Jones | ab12c6535f | |
Tony Garnock-Jones | 534018e3a4 | |
Tony Garnock-Jones | ef7cea09bf | |
Tony Garnock-Jones | 264c4b9d2e | |
Tony Garnock-Jones | 87e816306d | |
Tony Garnock-Jones | 5470497aa2 | |
Tony Garnock-Jones | f90544d807 | |
Tony Garnock-Jones | cb88c587b6 | |
Tony Garnock-Jones | e6efd03be7 | |
Tony Garnock-Jones | 351cafddb4 | |
Tony Garnock-Jones | 90ce0a544d | |
Tony Garnock-Jones | 7ab12108e4 | |
Tony Garnock-Jones | ed3cd8de26 | |
Tony Garnock-Jones | e4f7219dc6 | |
Tony Garnock-Jones | 98e2511fe1 | |
Tony Garnock-Jones | 3559cc679e | |
Tony Garnock-Jones | 460529e1c7 | |
Tony Garnock-Jones | badb059440 | |
Tony Garnock-Jones | 668d4e6271 | |
Tony Garnock-Jones | 394d10e6da | |
Tony Garnock-Jones | de7ac63b96 | |
Tony Garnock-Jones | 52bc77c9d7 | |
Tony Garnock-Jones | 49efc76580 | |
Tony Garnock-Jones | a24a5b19f5 | |
Tony Garnock-Jones | eeb84ad669 | |
Tony Garnock-Jones | 6c9071fd88 | |
Tony Garnock-Jones | 8b2aa0fb87 | |
Tony Garnock-Jones | 5c644624c4 | |
Tony Garnock-Jones | 163e338ce5 | |
Tony Garnock-Jones | 4ed8fd2c92 | |
Tony Garnock-Jones | 669d1b480d | |
Tony Garnock-Jones | 7d06c7dce0 | |
Tony Garnock-Jones | bdd699ae9f | |
Tony Garnock-Jones | 33a80533fa | |
Tony Garnock-Jones | 1ca796e6aa | |
Tony Garnock-Jones | 4914c8cd68 | |
Tony Garnock-Jones | 46d76dfca7 | |
Tony Garnock-Jones | 0db223ede8 | |
Tony Garnock-Jones | 52be118dc7 | |
Tony Garnock-Jones | a1fdddcf7b | |
Tony Garnock-Jones | e594d22d09 | |
Tony Garnock-Jones | aaee62044c | |
Tony Garnock-Jones | 9bdfc4c3ab | |
Tony Garnock-Jones | c4bfc0eefc | |
Tony Garnock-Jones | 2559a4713f | |
Tony Garnock-Jones | 1d6956fa55 | |
Tony Garnock-Jones | 9e6743abdc | |
Tony Garnock-Jones | 43b776eb7f | |
Tony Garnock-Jones | ebab3fafc5 | |
Tony Garnock-Jones | 986e7fa30d | |
Tony Garnock-Jones | b5405d80ec | |
Tony Garnock-Jones | 49cba14b4f | |
Tony Garnock-Jones | 1654ad4c80 | |
Tony Garnock-Jones | 2ddedc7673 | |
Tony Garnock-Jones | dbd6c3cf53 | |
Tony Garnock-Jones | 2bde06f509 | |
Tony Garnock-Jones | 0f1ea4aa20 | |
Tony Garnock-Jones | 46ab6d90ec | |
Tony Garnock-Jones | b5b4effeac | |
Tony Garnock-Jones | 20b676df27 | |
Tony Garnock-Jones | ecdb314366 | |
Tony Garnock-Jones | 7253d1507e | |
Tony Garnock-Jones | 10380e451a | |
Tony Garnock-Jones | 436b14e2fe | |
Tony Garnock-Jones | cefc029f70 | |
Tony Garnock-Jones | e4a2503899 | |
Tony Garnock-Jones | 6fc41ead6f | |
Tony Garnock-Jones | 8e068fbdbf | |
Tony Garnock-Jones | 8442718f96 | |
Tony Garnock-Jones | 854a2bc41c | |
Tony Garnock-Jones | 638f8e026e | |
Tony Garnock-Jones | 2d0e6255bd | |
Tony Garnock-Jones | 58d2bf6f3a | |
Tony Garnock-Jones | 70ce961dd2 | |
Tony Garnock-Jones | e5965fde83 | |
Tony Garnock-Jones | 8c783dbc7d | |
Tony Garnock-Jones | aef970dc2d | |
Tony Garnock-Jones | 8895d2b6a4 | |
Tony Garnock-Jones | be6537f6d4 | |
Tony Garnock-Jones | c54a17162d | |
Tony Garnock-Jones | 63cf5d1cf2 | |
Tony Garnock-Jones | 9f9514a7e6 | |
Tony Garnock-Jones | 825d208198 | |
Tony Garnock-Jones | b4d7af4322 | |
Tony Garnock-Jones | e763174846 | |
Tony Garnock-Jones | e1e7904a87 | |
Tony Garnock-Jones | 545e1bb6de | |
Tony Garnock-Jones | 3ad56a5275 | |
Tony Garnock-Jones | 55fab35073 | |
Tony Garnock-Jones | 4d8618ce63 | |
Tony Garnock-Jones | 121bcc7a53 | |
Tony Garnock-Jones | 94f6959ac8 | |
Tony Garnock-Jones | d64bb82c22 | |
Tony Garnock-Jones | c23cbcc60c | |
Tony Garnock-Jones | e187fb83b4 | |
Tony Garnock-Jones | 4434f712b6 | |
Tony Garnock-Jones | d91924c72b | |
Tony Garnock-Jones | 7d8453a806 | |
Tony Garnock-Jones | 1d73289345 | |
Tony Garnock-Jones | 4ded3a484c | |
Tony Garnock-Jones | 87dda48083 | |
Tony Garnock-Jones | 4814790d8e | |
Tony Garnock-Jones | 0304c2631b | |
Tony Garnock-Jones | 889d38bbb8 | |
Tony Garnock-Jones | c2fe82e71d | |
Tony Garnock-Jones | 0970898065 | |
Tony Garnock-Jones | 1c07573178 | |
Tony Garnock-Jones | c8b752a73b | |
Tony Garnock-Jones | d372977023 | |
Tony Garnock-Jones | 084f54f869 | |
Tony Garnock-Jones | 98558b81f0 | |
Tony Garnock-Jones | 5d2ee85b36 | |
Tony Garnock-Jones | 3463cd4a65 | |
Tony Garnock-Jones | 5f71239130 | |
Tony Garnock-Jones | 376e83acd0 | |
Tony Garnock-Jones | 306c7c2cae | |
Tony Garnock-Jones | bacf310648 | |
Tony Garnock-Jones | 98346c61d5 | |
Tony Garnock-Jones | ca42ffe832 | |
Tony Garnock-Jones | 05c7343983 | |
Tony Garnock-Jones | e6f99ae2e1 | |
Tony Garnock-Jones | b9019d03f1 | |
Tony Garnock-Jones | 178f528bf0 | |
Tony Garnock-Jones | 8f2da8f8db | |
Tony Garnock-Jones | 12121128a6 | |
Tony Garnock-Jones | 425e7dd5cb | |
Tony Garnock-Jones | 434279ab66 | |
Tony Garnock-Jones | 7072f19407 | |
Tony Garnock-Jones | adfb1822ac | |
Tony Garnock-Jones | 14be044092 | |
Tony Garnock-Jones | 5393308be4 | |
Tony Garnock-Jones | 5afb1469f3 | |
Tony Garnock-Jones | e078a71b30 | |
Tony Garnock-Jones | 47da2e5308 | |
Tony Garnock-Jones | 5e3ae0c18b | |
Tony Garnock-Jones | d6d4e830a3 | |
Tony Garnock-Jones | 2311dbd245 | |
Tony Garnock-Jones | d502249c53 | |
Tony Garnock-Jones | 1cc0325007 | |
Tony Garnock-Jones | 4ee9f99529 | |
Tony Garnock-Jones | 942fb79a2e | |
Tony Garnock-Jones | ce947c109a | |
Tony Garnock-Jones | 2b9eddfc79 | |
Tony Garnock-Jones | d811032ac7 | |
Tony Garnock-Jones | bddb111f87 | |
Tony Garnock-Jones | a1db64422c | |
Tony Garnock-Jones | cfa0a9caa3 | |
Tony Garnock-Jones | fc88612414 | |
Tony Garnock-Jones | c75aaf4b18 | |
Tony Garnock-Jones | 55e4222d68 | |
Tony Garnock-Jones | 9a46a22fb6 | |
Tony Garnock-Jones | 86fc0acc92 | |
Tony Garnock-Jones | f531eb347d | |
Tony Garnock-Jones | 8ec5946696 | |
Tony Garnock-Jones | d9726a6878 | |
Tony Garnock-Jones | 8d7e7c6d95 | |
Tony Garnock-Jones | c09032f609 | |
Tony Garnock-Jones | c8f564aea4 | |
Tony Garnock-Jones | 94f6f9af9d | |
Tony Garnock-Jones | d932431d83 | |
Tony Garnock-Jones | ba2c7e9978 | |
Tony Garnock-Jones | a0d51fab4c | |
Tony Garnock-Jones | 87bb930020 | |
Tony Garnock-Jones | 929f916d1d | |
Tony Garnock-Jones | 932818145b | |
Tony Garnock-Jones | 7c587f03d3 | |
Tony Garnock-Jones | 036d42a73a | |
Tony Garnock-Jones | 754306aca3 | |
Tony Garnock-Jones | 8187337187 | |
Tony Garnock-Jones | 447380218e | |
Tony Garnock-Jones | 62bab41bed | |
Tony Garnock-Jones | c27aa7579e | |
Tony Garnock-Jones | e894d0dbbc | |
Tony Garnock-Jones | dc96f74075 | |
Tony Garnock-Jones | a05bf0cb7a | |
Tony Garnock-Jones | 4022b76650 | |
Tony Garnock-Jones | 5412f8b9d0 | |
Tony Garnock-Jones | 550224e0b1 | |
Tony Garnock-Jones | eaff7b86d8 | |
Tony Garnock-Jones | 6d2120989b | |
Tony Garnock-Jones | c8c027f762 | |
Tony Garnock-Jones | 75790f237b | |
Tony Garnock-Jones | 1268c4f9bd | |
Tony Garnock-Jones | 481f866ada | |
Tony Garnock-Jones | 993689356b | |
Tony Garnock-Jones | 074fc5db98 | |
Tony Garnock-Jones | c46566e5a0 | |
Tony Garnock-Jones | 055b367764 | |
Tony Garnock-Jones | a19a9d50c6 | |
Tony Garnock-Jones | 4353d5280e | |
Tony Garnock-Jones | a69297c3ba | |
Tony Garnock-Jones | 407e8778a1 | |
Tony Garnock-Jones | 83b09d9406 | |
Tony Garnock-Jones | 95c04bd5d5 | |
Tony Garnock-Jones | 1743756097 | |
Tony Garnock-Jones | 013c5f4dae | |
Tony Garnock-Jones | 6fd06cec98 | |
Tony Garnock-Jones | 532e811894 | |
Tony Garnock-Jones | 6bf49874b7 | |
Tony Garnock-Jones | abca13e260 | |
Tony Garnock-Jones | 0bb61d260f | |
Tony Garnock-Jones | a5d4098e29 | |
Tony Garnock-Jones | d8a041a647 | |
Tony Garnock-Jones | 320215dca0 | |
Tony Garnock-Jones | b4d5334a1a | |
Tony Garnock-Jones | 329cee7bd6 | |
Tony Garnock-Jones | 10351b5369 | |
Tony Garnock-Jones | 3f62d68bab | |
Tony Garnock-Jones | b0ed7e914b | |
Tony Garnock-Jones | 55db55b42b | |
Tony Garnock-Jones | f7b7f29a3b | |
Tony Garnock-Jones | 98e981dccf | |
Tony Garnock-Jones | 66cac324e0 | |
Tony Garnock-Jones | 44f142d86b | |
Tony Garnock-Jones | 2c5ed693f5 | |
Tony Garnock-Jones | a1a604aee8 | |
Tony Garnock-Jones | ca2276d268 | |
Tony Garnock-Jones | 8459521db5 | |
Tony Garnock-Jones | 749747ca05 | |
Tony Garnock-Jones | 77fd8e86bf | |
Tony Garnock-Jones | 85fe7b3b07 | |
Tony Garnock-Jones | 5d719c2c6f | |
Tony Garnock-Jones | ccf4f97ed8 | |
Tony Garnock-Jones | 2391722a25 | |
Tony Garnock-Jones | 3c6bff6646 | |
Tony Garnock-Jones | 137aa308e3 | |
Tony Garnock-Jones | 3c059a573c |
|
@ -1,3 +1,4 @@
|
|||
_site/
|
||||
preserves.pdf
|
||||
preserves-schema.pdf
|
||||
scratch/
|
||||
|
|
|
@ -10,5 +10,5 @@ pages:
|
|||
paths:
|
||||
- public
|
||||
only:
|
||||
- master
|
||||
- main
|
||||
|
||||
|
|
13
Makefile
13
Makefile
|
@ -1,6 +1,15 @@
|
|||
preserves.pdf: preserves.md preserves.css
|
||||
__ignored__ := $(shell ./setup.sh)
|
||||
|
||||
PDFS=preserves.pdf preserves-schema.pdf
|
||||
|
||||
all: $(PDFS)
|
||||
|
||||
clean:
|
||||
rm -f $(PDFS)
|
||||
|
||||
%.pdf: %.md preserves.css
|
||||
google-chrome --headless --disable-gpu --print-to-pdf=$@ \
|
||||
http://localhost:4000/preserves/preserves.html
|
||||
http://localhost:4000/preserves/$*.html
|
||||
|
||||
test-all:
|
||||
make -C tests
|
||||
|
|
2
NOTICE
2
NOTICE
|
@ -1,2 +1,2 @@
|
|||
Preserves: an Expressive Data Language
|
||||
Copyright 2018-2019 Tony Garnock-Jones
|
||||
Copyright 2018-2021 Tony Garnock-Jones
|
||||
|
|
56
README.md
56
README.md
|
@ -1,15 +1,14 @@
|
|||
---
|
||||
projectpages: "https://gitlab.com/preserves/preserves"
|
||||
projecttree: "https://gitlab.com/preserves/preserves/tree/master"
|
||||
projecttree: "https://gitlab.com/preserves/preserves/tree/main"
|
||||
title: "Preserves: an Expressive Data Language"
|
||||
no_site_title: true
|
||||
---
|
||||
|
||||
This [repository]({{page.projectpages}}) contains a
|
||||
[proposal](preserves.html) and
|
||||
[various implementations]({{page.projecttree}}/implementations/) of
|
||||
*Preserves*, a new data model and serialization format in many ways
|
||||
comparable to JSON, XML, S-expressions, CBOR, ASN.1 BER, and so on.
|
||||
[proposal](preserves.html) and various implementations of *Preserves*,
|
||||
a new data model and serialization format in many ways comparable to
|
||||
JSON, XML, S-expressions, CBOR, ASN.1 BER, and so on.
|
||||
|
||||
> **WARNING** Everything in this repository is experimental and in
|
||||
> flux! The design of Preserves is not finalised and may change
|
||||
|
@ -18,10 +17,53 @@ comparable to JSON, XML, S-expressions, CBOR, ASN.1 BER, and so on.
|
|||
|
||||
## Core documents
|
||||
|
||||
### Preserves data model and serialization formats
|
||||
|
||||
Preserves is defined in terms of a syntax-neutral
|
||||
[data model and semantics](preserves.html#starting-with-semantics)
|
||||
which all transfer syntaxes share. This allows trivial, completely
|
||||
automatic, perfect-fidelity conversion between syntaxes.
|
||||
|
||||
- [Preserves tutorial](TUTORIAL.html)
|
||||
- [Preserves specification](preserves.html), including semantics,
|
||||
textual syntax, and compact binary syntax
|
||||
data model, textual syntax, and compact binary syntax
|
||||
- [Canonical Form for Binary Syntax](canonical-binary.html)
|
||||
- [Syrup](https://github.com/ocapn/syrup#pseudo-specification), a
|
||||
hybrid binary/human-readable syntax for the Preserves data model
|
||||
|
||||
### Preserves schema and queries
|
||||
|
||||
- [Preserves Schema specification](preserves-schema.html)
|
||||
- [Preserves Path specification](preserves-path.html)
|
||||
|
||||
## Implementations
|
||||
|
||||
Implementations of the data model, plus the textual and/or binary transfer syntaxes:
|
||||
|
||||
- [Preserves for Nim](https://git.sr.ht/~ehmry/preserves-nim)
|
||||
- [Preserves for Python]({{page.projecttree}}/implementations/python/) ([`pip install preserves`](https://pypi.org/project/preserves/))
|
||||
- [Preserves for Racket]({{page.projecttree}}/implementations/racket/preserves/) ([`raco pkg install preserves`](https://pkgs.racket-lang.org/package/preserves))
|
||||
- [Preserves for Rust]({{page.projecttree}}/implementations/rust/) ([crates.io package](https://crates.io/crates/preserves))
|
||||
- [Preserves for Squeak Smalltalk](https://squeaksource.com/Preserves.html) (`Installer ss project: 'Preserves'; install: 'Preserves'`)
|
||||
- [Preserves for TypeScript and JavaScript]({{page.projecttree}}/implementations/javascript/) ([`yarn add @preserves/core`](https://www.npmjs.com/package/@preserves/core))
|
||||
|
||||
Implementations of the data model, plus Syrup transfer syntax:
|
||||
|
||||
- [Syrup for Racket](https://github.com/ocapn/syrup/blob/master/impls/racket/syrup/syrup.rkt)
|
||||
- [Syrup for Guile](https://github.com/ocapn/syrup/blob/master/impls/guile/syrup.scm)
|
||||
- [Syrup for Python](https://github.com/ocapn/syrup/blob/master/impls/python/syrup.py)
|
||||
- [Syrup for JavaScript](https://github.com/zarutian/agoric-sdk/blob/zarutian/captp_variant/packages/captp/lib/syrup.js)
|
||||
- [Syrup for Haskell](https://github.com/zenhack/haskell-preserves)
|
||||
|
||||
## Tools
|
||||
|
||||
### Preserves documents
|
||||
|
||||
- [preserves-tool](doc/preserves-tool.html), generic syntax translation and pretty-printing
|
||||
|
||||
### Preserves Schema documents and codegen
|
||||
|
||||
- [Tools for working with Preserves Schema](doc/schema-tools.html)
|
||||
|
||||
## Additional resources
|
||||
|
||||
|
@ -39,4 +81,4 @@ Tony Garnock-Jones <tonyg@leastfixedpoint.com>
|
|||
The contents of this repository are made available to you under the
|
||||
[Apache License, version 2.0](LICENSE)
|
||||
(<http://www.apache.org/licenses/LICENSE-2.0>), and are Copyright
|
||||
2018-2020 Tony Garnock-Jones.
|
||||
2018-2021 Tony Garnock-Jones.
|
||||
|
|
5
TODO.md
5
TODO.md
|
@ -17,6 +17,11 @@ TODO:
|
|||
- if there were, it'd give IPLD-like characteristics to the thing from the get-go
|
||||
- IRIs and mime-typed things are already in there so why not content-based addressing
|
||||
|
||||
- Check out https://hitchdev.com/strictyaml/, in particular the "Why
|
||||
StrictYAML?" and "Design justifications" sections; perhaps borrow
|
||||
elements of that structure for writing a comparison of Preserves
|
||||
with other things
|
||||
|
||||
It is becoming VERY CLEAR that on-the-wire efficiency is... a
|
||||
secondary concern. Perhaps revise the binary syntax to be less terse
|
||||
and better for simple encoding and for term ordering,
|
||||
|
|
191
TUTORIAL.md
191
TUTORIAL.md
|
@ -38,7 +38,7 @@ For that, see the [Preserves specification](preserves.html).
|
|||
|
||||
If you're familiar with JSON, Preserves looks fairly similar:
|
||||
|
||||
``` javascript
|
||||
```
|
||||
{"name": "Missy Rose",
|
||||
"species": "Felis Catus",
|
||||
"age": 13,
|
||||
|
@ -49,35 +49,35 @@ Preserves also has something we can use for debugging/development
|
|||
information called "annotations"; they aren't actually read in as data
|
||||
but we can use them for comments.
|
||||
(They can also be used for other development tools and are not
|
||||
restricted to strings; more on this later, but for now interpret them
|
||||
as comments.)
|
||||
restricted to strings; more on this later, but for now, we will stick
|
||||
to the special comment annotation syntax.)
|
||||
|
||||
``` javascript
|
||||
@"I'm an annotation... basically a comment. Ignore me!"
|
||||
"I'm data! Don't ignore me!"
|
||||
```
|
||||
;I'm an annotation... basically a comment. Ignore me!
|
||||
"I'm data! Don't ignore me!"
|
||||
```
|
||||
|
||||
Preserves supports some data types you're probably already familiar
|
||||
with from JSON, and which look fairly similar in the textual format:
|
||||
|
||||
``` javascript
|
||||
@"booleans"
|
||||
#true
|
||||
#false
|
||||
|
||||
@"various kinds of numbers:"
|
||||
```
|
||||
;booleans
|
||||
#t
|
||||
#f
|
||||
|
||||
;various kinds of numbers:
|
||||
42
|
||||
123556789012345678901234567890
|
||||
-10
|
||||
13.5
|
||||
|
||||
@"strings"
|
||||
|
||||
;strings
|
||||
"I'm feeling stringy!"
|
||||
|
||||
@"sequences (lists)"
|
||||
|
||||
;sequences (lists)
|
||||
["cat", "dog", "mouse", "goldfish"]
|
||||
|
||||
@"dictionaries (hashmaps)"
|
||||
|
||||
;dictionaries (hashmaps)
|
||||
{"cat": "meow",
|
||||
"dog": "woof",
|
||||
"goldfish": "glub glub",
|
||||
|
@ -90,16 +90,16 @@ with from JSON, and which look fairly similar in the textual format:
|
|||
## Going beyond JSON
|
||||
|
||||
We can observe a few differences from JSON already; it's possible to
|
||||
express numbers of arbitrary length in Preserves, and booleans look a little
|
||||
*reliably* express integers of arbitrary length in Preserves, and booleans look a little
|
||||
bit different.
|
||||
A few more interesting differences:
|
||||
|
||||
``` javascript
|
||||
@"Preserves treats commas as whitespace, so these are the same"
|
||||
```
|
||||
;Preserves treats commas as whitespace, so these are the same
|
||||
["cat", "dog", "mouse", "goldfish"]
|
||||
["cat" "dog" "mouse" "goldfish"]
|
||||
|
||||
@"We can use anything as keys in dictionaries, not just strings"
|
||||
|
||||
;We can use anything as keys in dictionaries, not just strings
|
||||
{1: "the loneliest number",
|
||||
["why", "was", 6, "afraid", "of", 7]: "because 7 8 9",
|
||||
{"dictionaries": "as keys???"}: "well, why not?"}
|
||||
|
@ -107,17 +107,17 @@ A few more interesting differences:
|
|||
|
||||
Preserves technically provides a few types of numbers:
|
||||
|
||||
``` javascript
|
||||
@"Signed Integers"
|
||||
```
|
||||
;Signed Integers
|
||||
42
|
||||
-42
|
||||
5907212309572059846509324862304968273468909473609826340
|
||||
-5907212309572059846509324862304968273468909473609826340
|
||||
|
||||
@"Floats (Single-precision IEEE floats) (notice the trailing f)"
|
||||
|
||||
;Floats (Single-precision IEEE floats) (notice the trailing f)
|
||||
3.1415927f
|
||||
|
||||
@"Doubles (Double-precision IEEE floats)"
|
||||
|
||||
;Doubles (Double-precision IEEE floats)
|
||||
3.141592653589793
|
||||
```
|
||||
|
||||
|
@ -129,33 +129,33 @@ Often they're meant to be used for something that has symbolic importance
|
|||
to the program, but not textual importance (other than to guide the
|
||||
programmer… not unlike variable names).
|
||||
|
||||
``` javascript
|
||||
@"A symbol (NOT a string!)"
|
||||
```
|
||||
;A symbol (NOT a string!)
|
||||
JustASymbol
|
||||
|
||||
@"You can do mixedCase or CamelCase too of course, pick your poison"
|
||||
@"(but be consistent, for the sake of your collaborators!"
|
||||
|
||||
;You can do mixedCase or CamelCase too of course, pick your poison
|
||||
;(but be consistent, for the sake of your collaborators!)
|
||||
iAmASymbol
|
||||
i-am-a-symbol
|
||||
|
||||
@"A list of symbols"
|
||||
|
||||
;A list of symbols
|
||||
[GET, PUT, POST, DELETE]
|
||||
|
||||
@"A symbol with spaces in it"
|
||||
|
||||
;A symbol with spaces in it
|
||||
|this is just one symbol believe it or not|
|
||||
```
|
||||
|
||||
We can also add binary data, aka ByteStrings:
|
||||
|
||||
``` javascript
|
||||
@"Some binary data, base64 encoded"
|
||||
#base64{cGljdHVyZSBvZiBhIGNhdA==}
|
||||
|
||||
@"Some other binary data, hexadecimal encoded"
|
||||
#hex{616263}
|
||||
|
||||
@"Same binary data as above, base64 encoded"
|
||||
#base64{YWJj}
|
||||
```
|
||||
;Some binary data, base64 encoded
|
||||
#[cGljdHVyZSBvZiBhIGNhdA==]
|
||||
|
||||
;Some other binary data, hexadecimal encoded
|
||||
#x"616263"
|
||||
|
||||
;Same binary data as above, base64 encoded
|
||||
#[YWJj]
|
||||
```
|
||||
|
||||
What's neat about this is that we don't have to "pay the cost" of
|
||||
|
@ -165,48 +165,41 @@ the length of the binary data is the length of the binary data.
|
|||
Conveniently, Preserves also includes Sets, which are collections of
|
||||
unique elements where ordering of items is unimportant.
|
||||
|
||||
``` javascript
|
||||
#set{flour, salt, water}
|
||||
```
|
||||
#{flour, salt, water}
|
||||
```
|
||||
|
||||
<a id="orgefafe56"></a>
|
||||
|
||||
## Total ordering and canonicalization
|
||||
## Canonicalization
|
||||
|
||||
This is a good time to mention that even though from a semantic
|
||||
perspective sets and dictionaries do not carry information about the
|
||||
ordering of their elements (and Preserves doesn't care what order we
|
||||
enter them in for our hand-written-as-text Preserves documents),
|
||||
Preserves has a well-defined "total ordering".
|
||||
[Preserves provides support for canonical ordering](canonical-binary.html)
|
||||
when serializing.
|
||||
|
||||
Based on this total ordering, Preserves provides support for canonical
|
||||
ordering when serializing; in this mode, Preserves will always write
|
||||
out the elements in the same order, every time.
|
||||
When combined with binary serialization, this is Preserves' "canonical
|
||||
form".
|
||||
This is important and useful for many contexts, but especially for
|
||||
cryptographic signatures and hashing.
|
||||
In canonicalizing output mode, Preserves will always write out a given
|
||||
value using exactly the same bytes, every time. This is important and
|
||||
useful for many contexts, but especially for cryptographic signatures
|
||||
and hashing.
|
||||
|
||||
``` javascript
|
||||
@"This hand-typed Preserves document..."
|
||||
```
|
||||
;This hand-typed Preserves document...
|
||||
{monkey: {"noise": "ooh-ooh",
|
||||
"eats": #set{"bananas", "berries"}}
|
||||
"eats": #{"bananas", "berries"}}
|
||||
cat: {"noise": "meow",
|
||||
"eats": #set{"kibble", "cat treats", "tinned meat"}}}
|
||||
|
||||
@"Will always, always be written out in this order when canonicalized:"
|
||||
{cat: {"eats": #set{"cat treats", "kibble", "tinned meat"},
|
||||
"eats": #{"kibble", "cat treats", "tinned meat"}}}
|
||||
|
||||
;Will always, always be written out in this order (except in
|
||||
;binary, of course) when canonicalized:
|
||||
{cat: {"eats": #{"cat treats", "kibble", "tinned meat"},
|
||||
"noise": "meow"}
|
||||
monkey: {"eats": #set{"bananas", "berries"},
|
||||
monkey: {"eats": #{"bananas", "berries"},
|
||||
"noise": "ooh-ooh"}}
|
||||
```
|
||||
|
||||
Clever implementations can get canonicalized output for free by
|
||||
carefully ordering set elements and dictionary entries at construction
|
||||
time, but even in simple implementations, canonical serialization is
|
||||
almost as cheap as normal serialization.
|
||||
|
||||
|
||||
<a id="org0366627"></a>
|
||||
|
||||
## Defining our own types using Records
|
||||
|
@ -216,7 +209,7 @@ sense, it's a meta-type.
|
|||
`Record` objects have a label and a series of arguments (or "fields").
|
||||
For example, we can make a `Date` record:
|
||||
|
||||
``` javascript
|
||||
```
|
||||
<Date 2019 8 15>
|
||||
```
|
||||
|
||||
|
@ -228,7 +221,7 @@ We could instead just decide to encode our date data in a string,
|
|||
like "2019-08-15".
|
||||
A document using such a date structure might look like so:
|
||||
|
||||
``` javascript
|
||||
```
|
||||
{"name": "Gregor Samsa",
|
||||
"description": "humanoid trapped in an insect body",
|
||||
"born": "1915-10-04"}
|
||||
|
@ -243,13 +236,13 @@ know the date exactly.
|
|||
This causes a problem.
|
||||
Now we might have two kinds of entries:
|
||||
|
||||
``` javascript
|
||||
@"Exact date known"
|
||||
```
|
||||
;Exact date known
|
||||
{"name": "Gregor Samsa",
|
||||
"description": "humanoid trapped in an insect body",
|
||||
"born": "1915-10-04"}
|
||||
|
||||
@"Not sure about exact date..."
|
||||
|
||||
;Not sure about exact date...
|
||||
{"name": "Gregor Samsa",
|
||||
"description": "humanoid trapped in an insect body",
|
||||
"born": "Sometime in October 1915? Or was that when he became an insect?"}
|
||||
|
@ -261,13 +254,13 @@ like a date", but doing this kind of thing is prone to errors and weird
|
|||
edge cases.
|
||||
No, it's better to be able to have a separate type:
|
||||
|
||||
``` javascript
|
||||
@"Exact date known"
|
||||
```
|
||||
;Exact date known
|
||||
{"name": "Gregor Samsa",
|
||||
"description": "humanoid trapped in an insect body",
|
||||
"born": <Date 1915 10 04>}
|
||||
|
||||
@"Not sure about exact date..."
|
||||
|
||||
;Not sure about exact date...
|
||||
{"name": "Gregor Samsa",
|
||||
"description": "humanoid trapped in an insect body",
|
||||
"born": <Unknown "Sometime in October 1915? Or was that when he became an insect?">}
|
||||
|
@ -285,7 +278,7 @@ the meaning the label signifies for it to be of use.
|
|||
Still, there are plenty of interesting labels we can define.
|
||||
Here is one for an "iri", a hyperlink:
|
||||
|
||||
``` javascript
|
||||
```
|
||||
<iri "https://dustycloud.org/blog/">
|
||||
```
|
||||
|
||||
|
@ -294,11 +287,11 @@ Records are usually symbols but aren't necessarily so.
|
|||
They can also be strings or numbers or even dictionaries.
|
||||
And very interestingly, they can also be other records:
|
||||
|
||||
``` javascript
|
||||
<<iri "https://www.w3.org/ns/activitystreams#Note">
|
||||
{"to": [<iri "https://chatty.example/ben/">],
|
||||
"attributedTo": <iri "https://social.example/alyssa/">,
|
||||
"content": "Say, did you finish reading that book I lent you?"}>
|
||||
```
|
||||
< <iri "https://www.w3.org/ns/activitystreams#Note">
|
||||
{"to": [<iri "https://chatty.example/ben/">],
|
||||
"attributedTo": <iri "https://social.example/alyssa/">,
|
||||
"content": "Say, did you finish reading that book I lent you?"} >
|
||||
```
|
||||
|
||||
Do you see it? This Record's label is… an `iri` Record!
|
||||
|
@ -327,16 +320,18 @@ Annotations are not strictly a necessary feature, but they are useful
|
|||
in some circumstances.
|
||||
We have previously shown them used as comments:
|
||||
|
||||
``` javascript
|
||||
@"I'm a comment!"
|
||||
```
|
||||
;I'm a comment!
|
||||
"I am not a comment, I am data!"
|
||||
```
|
||||
|
||||
Annotations annotate the values they precede.
|
||||
It is possible to have multiple annotations on a value.
|
||||
The `;`-based comment syntax is syntactic sugar for the general
|
||||
`@`-prefixed string annotation syntax.
|
||||
|
||||
``` javascript
|
||||
@"I am annotating this number"
|
||||
```
|
||||
;I am annotating this number
|
||||
@"And so am I!"
|
||||
42
|
||||
```
|
||||
|
@ -349,7 +344,7 @@ Many implementations will, in the same mode, also supply line number
|
|||
and column information attached to each read value.
|
||||
|
||||
So what's the point of them then?
|
||||
If annotations were just for comments, there would be indeed hardly
|
||||
If annotations were just for comments, there would be indeed hardly any
|
||||
point at all… it would be simpler to just provide a comment syntax.
|
||||
|
||||
However, annotations can be used for more than just comments.
|
||||
|
@ -360,13 +355,17 @@ For instance, here's a reply from an HTTP API service running in
|
|||
"debug" mode annotated with the time it took to produce the reply and
|
||||
the internal name of the server that produced the response:
|
||||
|
||||
``` javascript
|
||||
```
|
||||
@<ResponseTime <Milliseconds 64.4>>
|
||||
@<BackendServer "humpty-dumpty.example.com">
|
||||
<Success
|
||||
<Employees [
|
||||
<Employee "Alyssa P. Hacker" #set{<Role Programmer>, <Role Manager>}, <Date 2018, 1, 24>>
|
||||
<Employee "Ben Bitdiddle" #set{<Role Programmer>}, <Date 2019, 2, 13>> ]>>
|
||||
<Employee "Alyssa P. Hacker"
|
||||
#{<Role Programmer>, <Role Manager>}
|
||||
<Date 2018, 1, 24>>
|
||||
<Employee "Ben Bitdiddle"
|
||||
#{<Role Programmer>}
|
||||
<Date 2019, 2, 13>> ]>>
|
||||
```
|
||||
|
||||
The annotations aren't related to the data requested, which is all
|
||||
|
|
|
@ -20,22 +20,17 @@ are equal.
|
|||
This document specifies canonical form for the Preserves compact
|
||||
binary syntax.
|
||||
|
||||
**General rules.**
|
||||
Streaming formats ("format C") *MUST NOT* be used.
|
||||
**Annotations.**
|
||||
Annotations *MUST NOT* be present.
|
||||
Whenever there is a choice between fixed-length ("format A") or
|
||||
variable-length ("format B") formats, the fixed-length format *MUST* be
|
||||
used.
|
||||
|
||||
**Sets.**
|
||||
The elements of a `Set` *MUST* be serialized sorted in ascending order
|
||||
following the total order relation defined in the
|
||||
[Preserves specification][spec].
|
||||
by comparing their canonical encoded binary representations.
|
||||
|
||||
**Dictionaries.**
|
||||
The key-value pairs in a `Dictionary` *MUST* be serialized sorted in
|
||||
ascending order by key, following the total order relation defined in
|
||||
the [Preserves specification][spec].[^no-need-for-by-value]
|
||||
ascending order by comparing the canonical encoded binary
|
||||
representations of their keys.[^no-need-for-by-value]
|
||||
|
||||
[^no-need-for-by-value]: There is no need to order by (key, value)
|
||||
pair, since a `Dictionary` has no duplicate keys.
|
||||
|
@ -43,7 +38,9 @@ the [Preserves specification][spec].[^no-need-for-by-value]
|
|||
**Other kinds of `Value`.**
|
||||
There are no special canonicalization restrictions on
|
||||
`SignedInteger`s, `String`s, `ByteString`s, `Symbol`s, `Boolean`s,
|
||||
`Float`s, `Double`s, `Record`s, or `Sequence`s.
|
||||
`Float`s, `Double`s, `Record`s, `Sequence`s, or `Embedded`s. The
|
||||
constraints given for these `Value`s in the [specification][spec]
|
||||
suffice to ensure canonicity.
|
||||
|
||||
<!-- Heading to visually offset the footnotes from the main document: -->
|
||||
## Notes
|
||||
|
|
|
@ -65,28 +65,29 @@ interior portions of a tree.
|
|||
## Comments.
|
||||
|
||||
`String` values used as annotations are conventionally interpreted as
|
||||
comments.
|
||||
comments. Special syntax exists for such string annotations, though
|
||||
the usual `@`-prefixed annotation notation can also be used.
|
||||
|
||||
@"I am a comment for the Dictionary"
|
||||
;I am a comment for the Dictionary
|
||||
{
|
||||
@"I am a comment for the key"
|
||||
key: @"I am a comment for the value"
|
||||
;I am a comment for the key
|
||||
key: ;I am a comment for the value
|
||||
value
|
||||
}
|
||||
|
||||
@"I am a comment for this entire IOList"
|
||||
;I am a comment for this entire IOList
|
||||
[
|
||||
#hex{00010203}
|
||||
@"I am a comment for the middle half of the IOList"
|
||||
@"A second comment for the same portion of the IOList"
|
||||
@ @"I am the first and only comment for the following comment"
|
||||
#x"00010203"
|
||||
;I am a comment for the middle half of the IOList
|
||||
;A second comment for the same portion of the IOList
|
||||
@ ;I am the first and only comment for the following comment
|
||||
"A third (itself commented!) comment for the same part of the IOList"
|
||||
[
|
||||
@"I am a comment for the following ByteString"
|
||||
#hex{04050607}
|
||||
#hex{08090A0B}
|
||||
;"I am a comment for the following ByteString"
|
||||
#x"04050607"
|
||||
#x"08090A0B"
|
||||
]
|
||||
#hex{0C0D0E0F}
|
||||
#x"0C0D0E0F"
|
||||
]
|
||||
|
||||
## MIME-type tagged binary data.
|
||||
|
@ -105,12 +106,17 @@ such media types following the general rules for ordering of
|
|||
|
||||
**Examples.**
|
||||
|
||||
| Value | Encoded hexadecimal byte sequence |
|
||||
|--------------------------------------------|-------------------------------------------------------------------------------------------------------------------|
|
||||
| `<mime application/octet-stream #"abcde">` | 83 74 6D 69 6D 65 7F 18 61 70 70 6C 69 63 61 74 69 6F 6E 2F 6F 63 74 65 74 2D 73 74 72 65 61 6D 65 61 62 63 64 65 |
|
||||
| `<mime text/plain #"ABC">` | 83 74 6D 69 6D 65 7A 74 65 78 74 2F 70 6C 61 69 6E 63 41 42 43 |
|
||||
| `<mime application/xml #"<xhtml/>">` | 83 74 6D 69 6D 65 7F 0F 61 70 70 6C 69 63 61 74 69 6F 6E 2F 78 6D 6C 68 3C 78 68 74 6D 6C 2F 3E |
|
||||
| `<mime text/csv #"123,234,345">` | 83 74 6D 69 6D 65 78 74 65 78 74 2F 63 73 76 6B 31 32 33 2C 32 33 34 2C 33 34 35 |
|
||||
«<mime application/octet-stream #"abcde">»
|
||||
= B4 B3 04 "mime" B3 18 "application/octet-stream" B2 05 "abcde"
|
||||
|
||||
«<mime text/plain #"ABC">»
|
||||
= B4 B3 04 "mime" B3 0A "text/plain" B2 03 "ABC" 84
|
||||
|
||||
«<mime application/xml #"<xhtml/>">»
|
||||
= B4 B3 04 "mime" B3 0F "application/xml" B2 08 "<xhtml/>" 84
|
||||
|
||||
«<mime text/csv #"123,234,345">»
|
||||
= B4 B3 04 "mime" B3 08 "text/csv" B2 0B "123,234,345" 84
|
||||
|
||||
## Unicode normalization forms.
|
||||
|
||||
|
@ -166,8 +172,46 @@ value can be represented as `<undefined>`.
|
|||
Dates, times, moments, and timestamps can be represented with a
|
||||
`Record` with label `rfc3339` having a single field, a `String`, which
|
||||
*MUST* conform to one of the `full-date`, `partial-time`, `full-time`,
|
||||
or `date-time` productions of
|
||||
[section 5.6 of RFC 3339](https://tools.ietf.org/html/rfc3339#section-5.6).
|
||||
or `date-time` productions of [section 5.6 of RFC
|
||||
3339](https://tools.ietf.org/html/rfc3339#section-5.6). (In
|
||||
`date-time`, "T" and "Z" *MUST* be upper-case and "T" *MUST* be used;
|
||||
a space separating the `full-date` and `full-time` *MUST NOT* be
|
||||
used.)
|
||||
|
||||
## XML Infoset
|
||||
|
||||
[XML Infoset](https://www.w3.org/TR/2004/REC-xml-infoset-20040204/)
|
||||
describes the semantics of XML - that is, the underlying information
|
||||
contained in a document, independent of surface syntax.
|
||||
|
||||
A useful subset of XML Infoset, namely its Element Information Items
|
||||
(omitting processing instructions, entities, entity references,
|
||||
comments, namespaces, name prefixes, and base URIs), can be captured
|
||||
with the [schema](preserves-schema.html)
|
||||
|
||||
Node = Text / Element .
|
||||
Text = string .
|
||||
Element =
|
||||
/ @withAttributes
|
||||
<<rec> @localName symbol [@attributes Attributes @children Node ...]>
|
||||
/ @withoutAttributes
|
||||
<<rec> @localName symbol @children [Node ...]> .
|
||||
Attributes = { symbol: string ...:... } .
|
||||
|
||||
**Examples.**
|
||||
|
||||
<html
|
||||
<h1 {class: "title"} "Hello World!">
|
||||
<p
|
||||
"I could swear I've seen markup like this somewhere before. "
|
||||
"Perhaps it was "
|
||||
<a {href: "https://docs.racket-lang.org/search/index.html?q=xexpr%3F"} "here">
|
||||
"?"
|
||||
>
|
||||
<table
|
||||
<tr <th> <th "Column 1"> <th "Column 2">>
|
||||
<tr <th "Row 1"> <td 123> <td 234>>>
|
||||
>
|
||||
|
||||
<!-- Heading to visually offset the footnotes from the main document: -->
|
||||
## Notes
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
version 1 .
|
||||
JSON =
|
||||
/ @string string
|
||||
/ @integer int
|
||||
/ @double double
|
||||
/ @boolean JSONBoolean
|
||||
/ @null =null
|
||||
/ @array [JSON ...]
|
||||
/ @object { string: JSON ...:... } .
|
||||
JSONBoolean = =true / =false .
|
|
@ -0,0 +1,46 @@
|
|||
---
|
||||
title: preserves-schema-rkt
|
||||
---
|
||||
|
||||
The `preserves-schema-rkt` program reads
|
||||
[Preserves Schema](../preserves-schema.html) DSL input files. For each
|
||||
input file, it produces a Racket source file of the same name but
|
||||
with `.rkt` in place of `.prs`.
|
||||
|
||||
Instead of using this tool, you may prefer to use `#lang
|
||||
preserves-schema` to use Schema DSL syntax in an ordinary Racket
|
||||
module source file.
|
||||
|
||||
## Installation
|
||||
|
||||
Install Racket. Then, `raco pkg install preserves`.
|
||||
|
||||
## Usage
|
||||
|
||||
usage: preserves-schema-rkt [ <option> ... ] [<input-glob>] ...
|
||||
|
||||
<option> is one of
|
||||
|
||||
--output <directory>
|
||||
Output directory for modules (default: next to sources)
|
||||
--stdout
|
||||
Prints each module to stdout one after the other instead of writing them to files in the `--output` directory
|
||||
--no-write-files
|
||||
Disables generation of output to the filesystem
|
||||
--base <directory>
|
||||
Base directory for sources (default: common prefix)
|
||||
* --module <namespace=path>
|
||||
Additional Namespace=path import
|
||||
* --plugin-lib <lib-path>, -l <lib-path>
|
||||
Load compiler plugin library
|
||||
* --plugin-file <rkt-file-path>, -f <rkt-file-path>
|
||||
Load compiler plugin source file
|
||||
--help, -h
|
||||
Show this help
|
||||
--
|
||||
Do not treat any remaining argument as a switch (at this level)
|
||||
|
||||
* Asterisks indicate options allowed multiple times.
|
||||
|
||||
Multiple single-letter switches can be combined after
|
||||
one `-`. For example, `-h-` is the same as `-h --`.
|
|
@ -0,0 +1,71 @@
|
|||
---
|
||||
title: preserves-schema-rs
|
||||
---
|
||||
|
||||
The `preserves-schema-rs` program reads
|
||||
[Preserves Schema](../preserves-schema.html) AST input files (such as
|
||||
are produced by [`preserves-schemac`]({% link doc/preserves-schemac.md
|
||||
%})). It produces a collection of Rust source files providing parsers,
|
||||
unparsers, and Rust data structures reflecting the definitions in the
|
||||
inputs.
|
||||
|
||||
## Using the compiler from `build.rs` instead
|
||||
|
||||
You will usually not need to use the `preserves-schema-rs`
|
||||
command-line program. Instead, access the preserves-schema compiler
|
||||
API from your `build.rs`. The following example is taken from
|
||||
[`build.rs` for the `preserves-path` crate](https://gitlab.com/preserves/preserves/-/blob/18ac9168996026073ee16164fce108054b2a0ed7/implementations/rust/preserves-path/build.rs):
|
||||
|
||||
use preserves_schema::compiler::*;
|
||||
|
||||
use std::io::Error;
|
||||
use std::path::PathBuf;
|
||||
|
||||
fn main() -> Result<(), Error> {
|
||||
let buildroot = PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
|
||||
|
||||
let mut gen_dir = buildroot.clone();
|
||||
gen_dir.push("src/schemas");
|
||||
|
||||
let mut c = CompilerConfig::new(gen_dir, "crate::schemas".to_owned());
|
||||
|
||||
let inputs = expand_inputs(&vec!["path.bin".to_owned()])?;
|
||||
c.load_schemas_and_bundles(&inputs)?;
|
||||
|
||||
compile(&c)
|
||||
}
|
||||
|
||||
This approach also requires an `include!` from your main, hand-written
|
||||
source tree. The following is a snippet from
|
||||
[`preserves-path/src/lib.rs`](https://gitlab.com/preserves/preserves/-/blob/18ac9168996026073ee16164fce108054b2a0ed7/implementations/rust/preserves-path/src/lib.rs):
|
||||
|
||||
pub mod schemas {
|
||||
include!(concat!(env!("OUT_DIR"), "/src/schemas/mod.rs"));
|
||||
}
|
||||
|
||||
## Installation
|
||||
|
||||
The tool is
|
||||
[written in Rust](https://crates.io/crates/preserves-schema).
|
||||
[Install `cargo`.](https://doc.rust-lang.org/cargo/getting-started/installation.html)
|
||||
Then, `cargo install preserves-schema`.
|
||||
|
||||
## Usage
|
||||
|
||||
preserves-schema 1.0.0
|
||||
|
||||
USAGE:
|
||||
preserves-schema-rs [OPTIONS] --output-dir <output-dir> --prefix <prefix> [--] [input-glob]...
|
||||
|
||||
FLAGS:
|
||||
-h, --help Prints help information
|
||||
-V, --version Prints version information
|
||||
|
||||
OPTIONS:
|
||||
--module <module>...
|
||||
-o, --output-dir <output-dir>
|
||||
-p, --prefix <prefix>
|
||||
--support-crate <support-crate>
|
||||
|
||||
ARGS:
|
||||
<input-glob>...
|
|
@ -0,0 +1,32 @@
|
|||
---
|
||||
title: preserves-schema-ts
|
||||
---
|
||||
|
||||
The `preserves-schema-ts` program reads
|
||||
[Preserves Schema](../preserves-schema.html) DSL input files. For each
|
||||
input file, it produces a TypeScript source file of the same name but
|
||||
with `.ts` in place of `.prs`.
|
||||
|
||||
## Installation
|
||||
|
||||
Install node.js v12 or newer. Then, `yarn global add @preserves/schema`.
|
||||
|
||||
## Usage
|
||||
|
||||
Usage: preserves-schema-ts [options] [input...]
|
||||
|
||||
Compile Preserves schema definitions to TypeScript
|
||||
|
||||
Arguments:
|
||||
input Input filename or glob
|
||||
|
||||
Options:
|
||||
--output <directory> Output directory for modules (default: next to sources)
|
||||
--stdout Prints each module to stdout one after the other instead of writing them to files in the `--output`
|
||||
directory
|
||||
--base <directory> Base directory for sources (default: common prefix)
|
||||
--core <path> Import path for @preserves/core (default: "@preserves/core")
|
||||
--watch Watch base directory for changes
|
||||
--traceback Include stack traces in compiler errors
|
||||
--module <namespace=path> Additional Namespace=path import (default: [])
|
||||
-h, --help display help for command
|
|
@ -0,0 +1,136 @@
|
|||
---
|
||||
title: preserves-schemac
|
||||
---
|
||||
|
||||
The `preserves-schemac` program reads
|
||||
[Preserves Schema](../preserves-schema.html) DSL input files and
|
||||
outputs a binary-syntax Preserves document conforming to the
|
||||
[metaschema](https://gitlab.com/preserves/preserves/-/blob/main/schema/schema.prs).
|
||||
|
||||
It can either output single `Schema` records (corresponding to a
|
||||
single input file), or a `Bundle` of `Schema`s (corresponding to a
|
||||
directory tree of files).
|
||||
|
||||
## Installation
|
||||
|
||||
Install node.js v12 or newer. Then, `yarn global add @preserves/schema`.
|
||||
|
||||
## Usage
|
||||
|
||||
Usage: preserves-schemac [options] [input...]
|
||||
|
||||
Compile textual Preserves schema definitions to binary format
|
||||
|
||||
Arguments:
|
||||
input Input filename or glob
|
||||
|
||||
Options:
|
||||
--no-bundle Emit a single Schema instead of a schema Bundle
|
||||
--base <directory> Base directory for sources (default: common prefix)
|
||||
-h, --help display help for command
|
||||
|
||||
## Examples
|
||||
|
||||
### Single file (non-bundle)
|
||||
|
||||
Given a file [`demo.prs`](demo.prs) containing:
|
||||
|
||||
version 1 .
|
||||
JSON =
|
||||
/ @string string
|
||||
/ @integer int
|
||||
/ @double double
|
||||
/ @boolean JSONBoolean
|
||||
/ @null =null
|
||||
/ @array [JSON ...]
|
||||
/ @object { string: JSON ...:... } .
|
||||
JSONBoolean = =true / =false .
|
||||
|
||||
running the following:
|
||||
|
||||
preserves-schemac --no-bundle demo.prs
|
||||
|
||||
will produce the following binary file on `stdout`:
|
||||
|
||||
00000000: b4b3 0673 6368 656d 61b7 b307 7665 7273 ...schema...vers
|
||||
00000010: 696f 6e91 b30b 6465 6669 6e69 7469 6f6e ion...definition
|
||||
00000020: 73b7 b304 4a53 4f4e b4b3 026f 72b5 b5b1 s...JSON...or...
|
||||
00000030: 0673 7472 696e 67b4 b304 6174 6f6d b306 .string...atom..
|
||||
00000040: 5374 7269 6e67 8484 b5b1 0769 6e74 6567 String.....integ
|
||||
00000050: 6572 b4b3 0461 746f 6db3 0d53 6967 6e65 er...atom..Signe
|
||||
00000060: 6449 6e74 6567 6572 8484 b5b1 0664 6f75 dInteger.....dou
|
||||
00000070: 626c 65b4 b304 6174 6f6d b306 446f 7562 ble...atom..Doub
|
||||
00000080: 6c65 8484 b5b1 0762 6f6f 6c65 616e b4b3 le.....boolean..
|
||||
00000090: 0372 6566 b584 b30b 4a53 4f4e 426f 6f6c .ref....JSONBool
|
||||
000000a0: 6561 6e84 84b5 b104 6e75 6c6c b4b3 036c ean.....null...l
|
||||
000000b0: 6974 b304 6e75 6c6c 8484 b5b1 0561 7272 it..null.....arr
|
||||
000000c0: 6179 b4b3 0573 6571 6f66 b4b3 0372 6566 ay...seqof...ref
|
||||
000000d0: b584 b304 4a53 4f4e 8484 84b5 b106 6f62 ....JSON......ob
|
||||
000000e0: 6a65 6374 b4b3 0664 6963 746f 66b4 b304 ject...dictof...
|
||||
000000f0: 6174 6f6d b306 5374 7269 6e67 84b4 b303 atom..String....
|
||||
00000100: 7265 66b5 84b3 044a 534f 4e84 8484 8484 ref....JSON.....
|
||||
00000110: b30b 4a53 4f4e 426f 6f6c 6561 6eb4 b302 ..JSONBoolean...
|
||||
00000120: 6f72 b5b5 b104 7472 7565 b4b3 036c 6974 or....true...lit
|
||||
00000130: b304 7472 7565 8484 b5b1 0566 616c 7365 ..true.....false
|
||||
00000140: b4b3 036c 6974 b305 6661 6c73 6584 8484 ...lit..false...
|
||||
00000150: 8484 b30c 656d 6265 6464 6564 5479 7065 ....embeddedType
|
||||
00000160: 8084 84 ...
|
||||
|
||||
Piping the output to [`preserves-tool`](./preserves-tool.html) to
|
||||
pretty-print it produces:
|
||||
|
||||
<schema {
|
||||
version: 1,
|
||||
embeddedType: #f,
|
||||
definitions: {
|
||||
JSONBoolean: <or [
|
||||
[
|
||||
"true",
|
||||
<lit true>
|
||||
],
|
||||
[
|
||||
"false",
|
||||
<lit false>
|
||||
]
|
||||
]>,
|
||||
JSON: <or [
|
||||
[
|
||||
"string",
|
||||
<atom String>
|
||||
],
|
||||
[
|
||||
"integer",
|
||||
<atom SignedInteger>
|
||||
],
|
||||
[
|
||||
"double",
|
||||
<atom Double>
|
||||
],
|
||||
[
|
||||
"boolean",
|
||||
<ref [] JSONBoolean>
|
||||
],
|
||||
[
|
||||
"null",
|
||||
<lit null>
|
||||
],
|
||||
[
|
||||
"array",
|
||||
<seqof <ref [] JSON>>
|
||||
],
|
||||
[
|
||||
"object",
|
||||
<dictof <atom String> <ref [] JSON>>
|
||||
]
|
||||
]>
|
||||
}
|
||||
}>
|
||||
|
||||
### Multiple file (bundle)
|
||||
|
||||
Given a directory tree containing multiple `*.prs` files, running
|
||||
|
||||
preserves-schemac '**.prs'
|
||||
|
||||
will produce a binary `Bundle` on `stdout` containing one `Schema` for
|
||||
each input file in the tree.
|
|
@ -0,0 +1,189 @@
|
|||
---
|
||||
title: preserves-tool
|
||||
---
|
||||
|
||||
The `preserves-tool` program is a swiss army knife for working with
|
||||
Preserves documents.
|
||||
|
||||
preserves-tools 1.0.0
|
||||
|
||||
USAGE:
|
||||
preserves-tool <SUBCOMMAND>
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
|
||||
SUBCOMMANDS:
|
||||
completions
|
||||
convert
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
quote
|
||||
|
||||
## Installation
|
||||
|
||||
The tool is
|
||||
[written in Rust](https://crates.io/crates/preserves-tools).
|
||||
[Install `cargo`.](https://doc.rust-lang.org/cargo/getting-started/installation.html)
|
||||
Then, `cargo install preserves-tools`.
|
||||
|
||||
## Subcommands
|
||||
|
||||
The tool includes three subcommands.
|
||||
|
||||
### `preserves-tool convert`
|
||||
|
||||
This is the main tool. It can
|
||||
|
||||
- translate between the various Preserves text and binary document
|
||||
syntaxes;
|
||||
- strip annotations;
|
||||
- pretty-print; and
|
||||
- break down and filter documents using [preserves path]({{
|
||||
site.baseurl }}{% link preserves-path.md %}) selectors.
|
||||
|
||||
#### Usage
|
||||
|
||||
preserves-tool-convert
|
||||
|
||||
USAGE:
|
||||
preserves-tool convert [FLAGS] [OPTIONS]
|
||||
|
||||
FLAGS:
|
||||
--collect
|
||||
--escape-spaces
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
-i, --input-format <INPUT_FORMAT>
|
||||
[default: auto-detect] [possible values: auto-detect, text, binary]
|
||||
|
||||
--indent <on/off>
|
||||
[default: on] [possible values: disabled, no, n, off, 0, false,
|
||||
enabled, yes, y, on, 1, true]
|
||||
|
||||
--limit <LIMIT>
|
||||
|
||||
|
||||
-o, --output-format <OUTPUT_FORMAT>
|
||||
[default: text] [possible values: text, binary, unquoted]
|
||||
|
||||
--read-annotations <on/off>
|
||||
[default: on] [possible values: disabled, no, n, off, 0, false,
|
||||
enabled, yes, y, on, 1, true]
|
||||
|
||||
--select <SELECT>
|
||||
[default: *]
|
||||
|
||||
--select-output <SELECT_OUTPUT>
|
||||
[default: sequence] [possible values: sequence, set]
|
||||
|
||||
--write-annotations <on/off>
|
||||
[default: on] [possible values: disabled, no, n, off, 0, false,
|
||||
enabled, yes, y, on, 1, true]
|
||||
|
||||
### `preserves-tool quote`
|
||||
|
||||
This subcommand reads chunks from standard input and outputs each one
|
||||
as a Preserves `String`, `Symbol`, or `ByteString` using either the
|
||||
text or binary Preserves surface syntax.
|
||||
|
||||
This is useful when writing shell scripts that interact with other
|
||||
programs using Preserves as an interchange format.
|
||||
|
||||
It defaults to taking the entirety of standard input as a single large
|
||||
chunk, but it can also work with newline- or `nul`-delimited chunks.
|
||||
|
||||
#### Usage
|
||||
|
||||
```
|
||||
preserves-tool-quote
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote [OPTIONS] <SUBCOMMAND>
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
-o, --output-format <OUTPUT_FORMAT> [default: text] [possible values: text, binary, unquoted]
|
||||
|
||||
SUBCOMMANDS:
|
||||
byte-string
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
string
|
||||
symbol
|
||||
```
|
||||
|
||||
```
|
||||
preserves-tool-quote-string
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote string [FLAGS] [OPTIONS]
|
||||
|
||||
FLAGS:
|
||||
--escape-spaces
|
||||
-h, --help Print help information
|
||||
--include-terminator
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
--input-terminator <INPUT_TERMINATOR> [default: eof] [possible values: eof, newline, nul]
|
||||
```
|
||||
|
||||
```
|
||||
preserves-tool-quote-symbol
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote symbol [FLAGS] [OPTIONS]
|
||||
|
||||
FLAGS:
|
||||
--escape-spaces
|
||||
-h, --help Print help information
|
||||
--include-terminator
|
||||
-V, --version Print version information
|
||||
|
||||
OPTIONS:
|
||||
--input-terminator <INPUT_TERMINATOR> [default: eof] [possible values: eof, newline, nul]
|
||||
```
|
||||
|
||||
```
|
||||
preserves-tool-quote-byte-string
|
||||
|
||||
USAGE:
|
||||
preserves-tool quote byte-string
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
```
|
||||
|
||||
### `preserves-tool completions`
|
||||
|
||||
This subcommand outputs Bash completion code to stdout, for sourcing
|
||||
at shell startup time.
|
||||
|
||||
#### Usage
|
||||
|
||||
Add the following to your `.profile` or similar:
|
||||
|
||||
eval "$(preserves-tool completions bash 2>/dev/null)"
|
||||
|
||||
Multiple shell dialects are supported (courtesy of
|
||||
[`clap`](https://crates.io/crates/clap)):
|
||||
|
||||
```
|
||||
preserves-tool-completions
|
||||
|
||||
USAGE:
|
||||
preserves-tool completions <dialect>
|
||||
|
||||
ARGS:
|
||||
<dialect> [possible values: bash, zsh, power-shell, fish, elvish]
|
||||
|
||||
FLAGS:
|
||||
-h, --help Print help information
|
||||
-V, --version Print version information
|
||||
```
|
|
@ -0,0 +1,11 @@
|
|||
---
|
||||
title: Tools for working with Preserves Schema
|
||||
---
|
||||
|
||||
A number of tools for working with [Preserves Schema]({{ site.baseurl
|
||||
}}{% link preserves-schema.md %}) exist:
|
||||
|
||||
- [preserves-schemac](preserves-schemac.html), generic Schema reader and linter
|
||||
- [preserves-schema-rkt](preserves-schema-rkt.html), Racket code generator
|
||||
- [preserves-schema-rs](preserves-schema-rs.html), Rust code generator
|
||||
- [preserves-schema-ts](preserves-schema-ts.html), TypeScript code generator
|
|
@ -0,0 +1,22 @@
|
|||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
exec 1>&2
|
||||
|
||||
# Ensure that various copies of schema.prs are in fact identical.
|
||||
cmp schema/schema.prs implementations/racket/preserves/preserves-schema/schema.prs
|
||||
|
||||
# Likewise for samples.pr
|
||||
cmp tests/samples.pr implementations/racket/preserves/preserves/tests/samples.pr
|
||||
|
||||
# https://gitlab.com/preserves/preserves/-/issues/30
|
||||
#
|
||||
# So it turns out that Racket's git-checkout mechanism pays attention
|
||||
# to portions of the tree outside the package of interest, which is
|
||||
# totally fair enough!
|
||||
#
|
||||
# But it means we can't use updir-containing symlinks anywhere in the
|
||||
# repository if we want to have a Racket-installable package as well.
|
||||
#
|
||||
# So I've cloned path.bin, too.
|
||||
cmp path/path.bin implementations/rust/preserves-path/path.bin
|
|
@ -0,0 +1,21 @@
|
|||
# Preserves Implementations
|
||||
|
||||
Here you may find:
|
||||
|
||||
- [dhall](dhall/), functions for converting Dhall values to a corresponding
|
||||
subset of Preserves.
|
||||
|
||||
- [javascript](javascript/), an implementation in TypeScript,
|
||||
compiling to JavaScript, for node.js and the Browser.
|
||||
|
||||
- [python](python/), an implementation for Python 2.x and 3.x.
|
||||
|
||||
- [racket](racket/), an implementation for Racket 7.x and newer
|
||||
(though older Rackets may also work with it).
|
||||
|
||||
- [rust](rust/), an implementation for Rust that interoperates with
|
||||
serde.
|
||||
|
||||
Other implementations are also available:
|
||||
|
||||
- [Preserves for Squeak Smalltalk](https://squeaksource.com/Preserves.html)
|
|
@ -0,0 +1,3 @@
|
|||
env:DHALL_PRELUDE
|
||||
? https://prelude.dhall-lang.org/v20.2.0/package.dhall
|
||||
sha256:a6036bc38d883450598d1de7c98ead113196fe2db02e9733855668b18096f07b
|
|
@ -0,0 +1,48 @@
|
|||
# Dhall
|
||||
|
||||
Not a true implementation of Preserves, but functions for translating Dhall
|
||||
values to Preserves and rendering them.
|
||||
|
||||
For example, to generate configuration for a Syndicate server listener:
|
||||
```dhall
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let Preserves = ./package.dhall
|
||||
|
||||
let Tcp/Type = { address : Text, port : Natural }
|
||||
|
||||
let RelayListener/Type = { transport : Tcp/Type }
|
||||
|
||||
let RequireService/Type = { relayListener : RelayListener/Type }
|
||||
|
||||
let Tcp/toPreserves =
|
||||
λ(tcp : Tcp/Type) →
|
||||
Preserves.record
|
||||
(Preserves.symbol "tcp")
|
||||
[ Preserves.string tcp.address
|
||||
, Preserves.integer (Prelude.Natural.toInteger tcp.port)
|
||||
]
|
||||
|
||||
let RelayListener/toPreserves =
|
||||
λ(relayListener : RelayListener/Type) →
|
||||
Preserves.record
|
||||
(Preserves.symbol "relay-listener")
|
||||
[ Tcp.toPreserves relayListener.transport ]
|
||||
|
||||
let RequireService/toPreserves =
|
||||
λ(requireService : RequireService/Type) →
|
||||
Preserves.record
|
||||
(Preserves.symbol "require-service")
|
||||
[ RelayListener.toPreserves requireService.relayListener ]
|
||||
|
||||
let example = { relayListener.transport = { address = "127.0.0.1", port = 1 } }
|
||||
|
||||
let rendering = Preserves.render (RequireService.toPreserves example)
|
||||
|
||||
let check =
|
||||
assert
|
||||
: rendering ≡ "<require-service <relay-listener <tcp \"127.0.0.1\" 1>>>"
|
||||
|
||||
in rendering
|
||||
|
||||
```
|
|
@ -0,0 +1,10 @@
|
|||
{-|
|
||||
Dhall encoding of an arbitrary Preserves value
|
||||
-}
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let Preserves/Type
|
||||
: Type
|
||||
= ∀(Preserves : Type) → ∀(value : Preserves/function Preserves) → Preserves
|
||||
|
||||
in Preserves/Type
|
|
@ -0,0 +1,15 @@
|
|||
{-|
|
||||
Create a Preserves boolean map from a `Bool` value
|
||||
-}
|
||||
let Preserves/Type = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let bool
|
||||
: Bool → Preserves/Type
|
||||
= λ(x : Bool) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.boolean x
|
||||
|
||||
in bool
|
|
@ -0,0 +1,37 @@
|
|||
{-|
|
||||
Create a Preserves dictionary value from a Dhall `Map` of `Preserves` values
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Map/Entry = Prelude.Map.Entry
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let Preserves/Entry = Map/Entry Preserves Preserves
|
||||
|
||||
let Preserves/Map = List Preserves/Entry
|
||||
|
||||
let map
|
||||
: Preserves/Map → Preserves
|
||||
= λ(x : Preserves/Map) →
|
||||
λ(Preserves : Type) →
|
||||
let Preserves/Entry = Map/Entry Preserves Preserves
|
||||
|
||||
in λ(value : Preserves/function Preserves) →
|
||||
value.dictionary
|
||||
( List/map
|
||||
Preserves/Entry@1
|
||||
Preserves/Entry
|
||||
( λ(e : Preserves/Entry@1) →
|
||||
{ mapKey = e.mapKey Preserves value
|
||||
, mapValue = e.mapValue Preserves value
|
||||
}
|
||||
)
|
||||
x
|
||||
)
|
||||
|
||||
in map
|
|
@ -0,0 +1,40 @@
|
|||
{-|
|
||||
Create a Preserves dictionary value from a Dhall `Map`
|
||||
|
||||
See ./render.dhall for an example.
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/dictionary = ./dictionary.dhall
|
||||
|
||||
let dictionaryOf
|
||||
: ∀(a : Type) →
|
||||
(a → Preserves) →
|
||||
∀(b : Type) →
|
||||
(b → Preserves) →
|
||||
Prelude.Map.Type a b →
|
||||
Preserves
|
||||
= λ(a : Type) →
|
||||
λ(key : a → Preserves) →
|
||||
λ(b : Type) →
|
||||
λ(value : b → Preserves) →
|
||||
λ(x : Prelude.Map.Type a b) →
|
||||
let ab = Prelude.Map.Entry a b
|
||||
|
||||
let pp = Prelude.Map.Entry Preserves Preserves
|
||||
|
||||
in Preserves/dictionary
|
||||
( List/map
|
||||
ab
|
||||
pp
|
||||
( λ(x : ab) →
|
||||
{ mapKey = key x.mapKey, mapValue = value x.mapValue }
|
||||
)
|
||||
x
|
||||
)
|
||||
|
||||
in dictionaryOf
|
|
@ -0,0 +1,15 @@
|
|||
{-|
|
||||
Create a Preserves floating-point value from a `Double` value
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let double
|
||||
: Double → Preserves
|
||||
= λ(x : Double) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.double x
|
||||
|
||||
in double
|
|
@ -0,0 +1,15 @@
|
|||
{-|
|
||||
Create an embedded Preserves value.
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let embedded
|
||||
: Preserves → Preserves
|
||||
= λ(value : Preserves) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.embedded (value@1 Preserves value)
|
||||
|
||||
in embedded
|
|
@ -0,0 +1,40 @@
|
|||
{-|
|
||||
Translate a `JSON` value to a `Preserves` value
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let JSON = Prelude.JSON.Type
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let fromJSON
|
||||
: JSON → Preserves
|
||||
= λ(json : JSON) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
json
|
||||
Preserves
|
||||
{ array = value.sequence
|
||||
, bool = λ(x : Bool) → value.symbol (if x then "true" else "false")
|
||||
, double = value.double
|
||||
, integer = value.integer
|
||||
, null = value.symbol "null"
|
||||
, object =
|
||||
let Entry = { mapKey : Text, mapValue : Preserves }
|
||||
|
||||
in λ(m : List Entry) →
|
||||
value.dictionary
|
||||
( List/map
|
||||
Entry
|
||||
{ mapKey : Preserves, mapValue : Preserves }
|
||||
(λ(e : Entry) → e with mapKey = value.string e.mapKey)
|
||||
m
|
||||
)
|
||||
, string = value.string
|
||||
}
|
||||
|
||||
in fromJSON
|
|
@ -0,0 +1,12 @@
|
|||
λ(Preserves : Type) →
|
||||
{ boolean : Bool → Preserves
|
||||
, double : Double → Preserves
|
||||
, integer : Integer → Preserves
|
||||
, string : Text → Preserves
|
||||
, symbol : Text → Preserves
|
||||
, record : Preserves → List Preserves → Preserves
|
||||
, sequence : List Preserves → Preserves
|
||||
, set : List Preserves → Preserves
|
||||
, dictionary : List { mapKey : Preserves, mapValue : Preserves } → Preserves
|
||||
, embedded : Preserves → Preserves
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
{-|
|
||||
Create a Preserves integer value from an `Integer` value
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let integer
|
||||
: Integer → Preserves
|
||||
= λ(x : Integer) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.integer x
|
||||
|
||||
in integer
|
|
@ -0,0 +1,16 @@
|
|||
{ Type = ./Type.dhall
|
||||
, function = ./function.dhall
|
||||
, boolean = ./boolean.dhall
|
||||
, dictionary = ./dictionary.dhall
|
||||
, dictionaryOf = ./dictionaryOf.dhall
|
||||
, double = ./double.dhall
|
||||
, embedded = ./embedded.dhall
|
||||
, fromJSON = ./fromJSON.dhall
|
||||
, integer = ./integer.dhall
|
||||
, record = ./record.dhall
|
||||
, render = ./render.dhall
|
||||
, sequence = ./sequence.dhall
|
||||
, sequenceOf = ./sequenceOf.dhall
|
||||
, string = ./string.dhall
|
||||
, symbol = ./symbol.dhall
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let record =
|
||||
λ(label : Preserves) →
|
||||
λ(fields : List Preserves) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.record
|
||||
(label Preserves value)
|
||||
( List/map
|
||||
Preserves@1
|
||||
Preserves
|
||||
(λ(value : Preserves@1) → value Preserves value@1)
|
||||
fields
|
||||
)
|
||||
|
||||
in record
|
|
@ -0,0 +1,100 @@
|
|||
{-
|
||||
Render a `Preserves` value to a diagnostic `Text` value
|
||||
-}
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let Map/Type = Prelude.Map.Type
|
||||
|
||||
let Text/concatSep = Prelude.Text.concatSep
|
||||
|
||||
let Text/concatMapSep = Prelude.Text.concatMapSep
|
||||
|
||||
let render
|
||||
: Preserves → Text
|
||||
= λ(value : Preserves) →
|
||||
value
|
||||
Text
|
||||
{ boolean = λ(x : Bool) → if x then "#t" else "#f"
|
||||
, double = Double/show
|
||||
, integer = Prelude.JSON.renderInteger
|
||||
, string = Text/show
|
||||
, symbol = λ(sym : Text) → "${sym}"
|
||||
, record =
|
||||
λ(label : Text) →
|
||||
λ(fields : List Text) →
|
||||
"<${label}"
|
||||
++ (if Prelude.List.null Text fields then "" else " ")
|
||||
++ Text/concatSep " " fields
|
||||
++ ">"
|
||||
, sequence = λ(xs : List Text) → "[ " ++ Text/concatSep " " xs ++ " ]"
|
||||
, set = λ(xs : List Text) → "#{" ++ Text/concatSep " " xs ++ " }"
|
||||
, dictionary =
|
||||
λ(m : Map/Type Text Text) →
|
||||
"{ "
|
||||
++ Text/concatMapSep
|
||||
" "
|
||||
{ mapKey : Text, mapValue : Text }
|
||||
( λ(e : { mapKey : Text, mapValue : Text }) →
|
||||
"${e.mapKey}: ${e.mapValue}"
|
||||
)
|
||||
m
|
||||
++ " }"
|
||||
, embedded = λ(value : Text) → "#!${value}"
|
||||
}
|
||||
|
||||
let Preserves/boolean = ./boolean.dhall
|
||||
|
||||
let Preserves/integer = ./integer.dhall
|
||||
|
||||
let Preserves/double = ./double.dhall
|
||||
|
||||
let Preserves/symbol = ./symbol.dhall
|
||||
|
||||
let Preserves/record = ./record.dhall
|
||||
|
||||
let Preserves/sequenceOf = ./sequenceOf.dhall
|
||||
|
||||
let Preserves/dictionaryOf = ./dictionaryOf.dhall
|
||||
|
||||
let Preserves/dictionaryOfSymbols = Preserves/dictionaryOf Text Preserves/symbol
|
||||
|
||||
let Preserves/embedded = ./embedded.dhall
|
||||
|
||||
let example0 =
|
||||
assert
|
||||
: ''
|
||||
${render
|
||||
( Preserves/dictionaryOfSymbols
|
||||
Preserves
|
||||
(λ(x : Preserves) → x)
|
||||
( toMap
|
||||
{ a = Preserves/integer +1
|
||||
, b =
|
||||
Preserves/sequenceOf
|
||||
Integer
|
||||
Preserves/integer
|
||||
[ +2, +3 ]
|
||||
, c =
|
||||
Preserves/dictionaryOfSymbols
|
||||
Double
|
||||
Preserves/double
|
||||
(toMap { d = 1.0, e = -1.0 })
|
||||
, d = Preserves/embedded (Preserves/boolean True)
|
||||
, e =
|
||||
Preserves/record
|
||||
(Preserves/symbol "capture")
|
||||
[ Preserves/record
|
||||
(Preserves/symbol "_")
|
||||
([] : List Preserves)
|
||||
]
|
||||
}
|
||||
)
|
||||
)}
|
||||
''
|
||||
≡ ''
|
||||
{ a: 1 b: [ 2 3 ] c: { d: 1.0 e: -1.0 } d: #!#t e: <capture <_>> }
|
||||
''
|
||||
|
||||
in render
|
|
@ -0,0 +1,27 @@
|
|||
{-|
|
||||
Create a Preserves sequence value from a `List` of `Preserve` values
|
||||
|
||||
See ./sequenceOf.dhall for an example.
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let sequence
|
||||
: List Preserves → Preserves
|
||||
= λ(x : List Preserves) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.sequence
|
||||
( List/map
|
||||
Preserves@1
|
||||
Preserves
|
||||
(λ(value : Preserves@1) → value Preserves value@1)
|
||||
x
|
||||
)
|
||||
|
||||
in sequence
|
|
@ -0,0 +1,21 @@
|
|||
{-|
|
||||
Create a Preserves sequence value from a `List` of values and a conversion function
|
||||
|
||||
See ./render.dhall for an example.
|
||||
-}
|
||||
let Prelude = ./Prelude.dhall
|
||||
|
||||
let List/map = Prelude.List.map
|
||||
|
||||
let Preserves = ./Type.dhall
|
||||
|
||||
let Preserves/sequence = ./sequence.dhall
|
||||
|
||||
let sequenceOf
|
||||
: ∀(a : Type) → (a → Preserves) → List a → Preserves
|
||||
= λ(a : Type) →
|
||||
λ(f : a → Preserves) →
|
||||
λ(xs : List a) →
|
||||
Preserves/sequence (List/map a Preserves f xs)
|
||||
|
||||
in sequenceOf
|
|
@ -0,0 +1,15 @@
|
|||
{-|
|
||||
Create a Preserves string from a `Text` value
|
||||
-}
|
||||
let Preserves/Type = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let string
|
||||
: Text → Preserves/Type
|
||||
= λ(x : Text) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.string x
|
||||
|
||||
in string
|
|
@ -0,0 +1,15 @@
|
|||
{-|
|
||||
Create a Preserves symbol from a `Text` value
|
||||
-}
|
||||
let Preserves/Type = ./Type.dhall
|
||||
|
||||
let Preserves/function = ./function.dhall
|
||||
|
||||
let symbol
|
||||
: Text → Preserves/Type
|
||||
= λ(x : Text) →
|
||||
λ(Preserves : Type) →
|
||||
λ(value : Preserves/function Preserves) →
|
||||
value.symbol x
|
||||
|
||||
in symbol
|
|
@ -0,0 +1,6 @@
|
|||
((nil . ((eval .
|
||||
(setq tide-tsserver-executable
|
||||
(concat
|
||||
(let ((d (dir-locals-find-file ".")))
|
||||
(if (stringp d) d (car d)))
|
||||
"node_modules/typescript/lib/tsserver.js"))))))
|
|
@ -1,4 +1,2 @@
|
|||
.nyc_output/
|
||||
coverage/
|
||||
yarn-error.log
|
||||
node_modules/
|
||||
package-lock.json
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
Use yarn, not npm!
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"packages": ["packages/*"],
|
||||
"version": "independent",
|
||||
"npmClient": "yarn",
|
||||
"useWorkspaces": true
|
||||
}
|
|
@ -1,24 +1,25 @@
|
|||
{
|
||||
"name": "preserves",
|
||||
"version": "0.3.0",
|
||||
"description": "Experimental data serialization format",
|
||||
"homepage": "https://gitlab.com/preserves/preserves",
|
||||
"license": "Apache-2.0",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": "gitlab:preserves/preserves",
|
||||
"scripts": {
|
||||
"test": "mocha",
|
||||
"cover": "nyc --reporter=html mocha"
|
||||
},
|
||||
"main": "src/index.js",
|
||||
"author": "Tony Garnock-Jones <tonyg@leastfixedpoint.com>",
|
||||
"name": "@preserves/root",
|
||||
"private": true,
|
||||
"devDependencies": {
|
||||
"mocha": "^5.2.0",
|
||||
"nyc": "^14.1.1"
|
||||
"@types/jest": "^26.0.19",
|
||||
"jest": "^26.6.3",
|
||||
"lerna": "^4.0.0",
|
||||
"rollup": "^2.40.0",
|
||||
"rollup-plugin-terser": "^7.0.2",
|
||||
"ts-jest": "^26.5.2",
|
||||
"ts-node-dev": "^1.1.6",
|
||||
"typescript": "^4.2.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"immutable": "^4.0.0-rc.12"
|
||||
"workspaces": [
|
||||
"packages/*"
|
||||
],
|
||||
"scripts": {
|
||||
"prepare": "lerna exec yarn run prepare",
|
||||
"clean": "lerna exec yarn run clean",
|
||||
"veryclean": "yarn run veryclean:local && lerna exec yarn run veryclean",
|
||||
"veryclean:local": "rm -rf node_modules",
|
||||
"build": "lerna exec yarn run prepare",
|
||||
"test": "lerna exec yarn run test"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
dist/
|
||||
lib/
|
|
@ -0,0 +1 @@
|
|||
version-tag-prefix javascript-@preserves/core@
|
|
@ -0,0 +1,10 @@
|
|||
import './src/values'; // required here because it monkey-patches
|
||||
// various globals like Array, Symbol etc., and
|
||||
// we need the patching to be done here so the
|
||||
// patching is consistently visible in the
|
||||
// per-test sub-VMs.
|
||||
|
||||
export default {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
};
|
|
@ -0,0 +1,24 @@
|
|||
{
|
||||
"name": "@preserves/core",
|
||||
"version": "0.17.0",
|
||||
"description": "Preserves data serialization format",
|
||||
"homepage": "https://gitlab.com/preserves/preserves",
|
||||
"license": "Apache-2.0",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": "gitlab:preserves/preserves",
|
||||
"main": "dist/preserves.js",
|
||||
"module": "lib/index.js",
|
||||
"types": "lib/index.d.ts",
|
||||
"author": "Tony Garnock-Jones <tonyg@leastfixedpoint.com>",
|
||||
"scripts": {
|
||||
"clean": "rm -rf lib dist",
|
||||
"prepare": "tsc && rollup -c",
|
||||
"rollupwatch": "rollup -c -w",
|
||||
"test": "jest",
|
||||
"testwatch": "jest --watch",
|
||||
"veryclean": "yarn run clean && rm -rf node_modules",
|
||||
"watch": "tsc -w"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
import { terser } from 'rollup-plugin-terser';
|
||||
|
||||
const distfile = (insertion) => `dist/preserves${insertion}.js`;
|
||||
|
||||
function umd(insertion, extra) {
|
||||
return {
|
||||
file: distfile(insertion),
|
||||
format: 'umd',
|
||||
name: 'Preserves',
|
||||
... (extra || {})
|
||||
};
|
||||
}
|
||||
|
||||
function es6(insertion, extra) {
|
||||
return {
|
||||
file: distfile('.es6' + insertion),
|
||||
format: 'es',
|
||||
... (extra || {})
|
||||
};
|
||||
}
|
||||
|
||||
export default [{
|
||||
input: 'lib/index.js',
|
||||
output: [
|
||||
umd(''),
|
||||
umd('.min', { plugins: [terser()] }),
|
||||
es6(''),
|
||||
es6('.min', { plugins: [terser()] }),
|
||||
],
|
||||
}];
|
|
@ -0,0 +1,119 @@
|
|||
import { Encoder } from "./encoder";
|
||||
import { Tag } from "./constants";
|
||||
import { AsPreserve, PreserveOn } from "./symbols";
|
||||
import { Value } from "./values";
|
||||
import { is, isAnnotated, IsPreservesAnnotated } from "./is";
|
||||
import { stringify } from "./text";
|
||||
import { GenericEmbedded } from "./embedded";
|
||||
|
||||
export interface Position {
|
||||
line?: number;
|
||||
column?: number;
|
||||
pos: number;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export function newPosition(name?: string): Position {
|
||||
return { line: 1, column: 0, pos: 0, name };
|
||||
}
|
||||
|
||||
export function updatePosition(p: Position, ch: string): boolean {
|
||||
p.pos++;
|
||||
if (p.line === void 0) {
|
||||
return false;
|
||||
} else {
|
||||
let advancedLine = false;
|
||||
switch (ch) {
|
||||
case '\t':
|
||||
p.column = (p.column! + 8) & ~7;
|
||||
break;
|
||||
case '\n':
|
||||
p.column = 0;
|
||||
p.line++;
|
||||
advancedLine = true;
|
||||
break;
|
||||
case '\r':
|
||||
p.column = 0;
|
||||
break;
|
||||
default:
|
||||
p.column!++;
|
||||
break;
|
||||
}
|
||||
return advancedLine;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatPosition(p: Position | null | string): string {
|
||||
if (p === null) {
|
||||
return '<unknown>';
|
||||
} else if (typeof p === 'string') {
|
||||
return p;
|
||||
} else {
|
||||
return `${p.name ?? ''}:${p.line ?? ''}:${p.column ?? ''}:${p.pos}`;
|
||||
}
|
||||
}
|
||||
|
||||
export class Annotated<T = GenericEmbedded> {
|
||||
readonly annotations: Array<Value<T>>;
|
||||
readonly pos: Position | null;
|
||||
readonly item: Value<T>;
|
||||
|
||||
constructor(item: Value<T>, pos?: Position) {
|
||||
this.annotations = [];
|
||||
this.pos = pos ?? null;
|
||||
this.item = item;
|
||||
}
|
||||
|
||||
[AsPreserve](): Value<T> {
|
||||
return this;
|
||||
}
|
||||
|
||||
[PreserveOn](encoder: Encoder<T>) {
|
||||
if (encoder.includeAnnotations) {
|
||||
for (const a of this.annotations) {
|
||||
encoder.state.emitbyte(Tag.Annotation);
|
||||
encoder.push(a);
|
||||
}
|
||||
}
|
||||
encoder.push(this.item);
|
||||
}
|
||||
|
||||
equals(other: any): boolean {
|
||||
return is(this.item, Annotated.isAnnotated(other) ? other.item : other);
|
||||
}
|
||||
|
||||
// hashCode(): number {
|
||||
// return hash(this.item);
|
||||
// }
|
||||
|
||||
toString(): string {
|
||||
return this.asPreservesText();
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
const anns = this.annotations.map((a) => '@' + stringify(a)).join(' ');
|
||||
return (anns ? anns + ' ' : anns) + stringify(this.item);
|
||||
}
|
||||
|
||||
get [IsPreservesAnnotated](): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
static isAnnotated<T = GenericEmbedded>(x: any): x is Annotated<T> {
|
||||
return isAnnotated(x);
|
||||
}
|
||||
}
|
||||
|
||||
export function annotate<T = GenericEmbedded>(v0: Value<T>, ...anns: Value<T>[]): Annotated<T> {
|
||||
const v = Annotated.isAnnotated<T>(v0) ? v0 : new Annotated(v0);
|
||||
anns.forEach((a) => v.annotations.push(a));
|
||||
return v;
|
||||
}
|
||||
|
||||
export function annotations<T = GenericEmbedded>(v: Value<T>): Array<Value<T>> {
|
||||
return Annotated.isAnnotated<T>(v) ? v.annotations : [];
|
||||
}
|
||||
|
||||
export function position<T = GenericEmbedded>(v: Value<T>): Position | null {
|
||||
return Annotated.isAnnotated<T>(v) ? v.pos : null;
|
||||
}
|
|
@ -0,0 +1,259 @@
|
|||
import { Tag } from './constants';
|
||||
import { AsPreserve, PreserveOn } from './symbols';
|
||||
import { Encoder, Preservable } from './encoder';
|
||||
import { Value } from './values';
|
||||
import { GenericEmbedded } from './embedded';
|
||||
|
||||
const textEncoder = new TextEncoder();
|
||||
const textDecoder = new TextDecoder();
|
||||
|
||||
export const IsPreservesBytes = Symbol.for('IsPreservesBytes');
|
||||
|
||||
export type BytesLike = Bytes | Uint8Array;
|
||||
|
||||
export class Bytes implements Preservable<never> {
|
||||
readonly _view: Uint8Array;
|
||||
|
||||
constructor(maybeByteIterable: any = new Uint8Array()) {
|
||||
if (Bytes.isBytes(maybeByteIterable)) {
|
||||
this._view = maybeByteIterable._view;
|
||||
} else if (ArrayBuffer.isView(maybeByteIterable)) {
|
||||
this._view = new Uint8Array(maybeByteIterable.buffer,
|
||||
maybeByteIterable.byteOffset,
|
||||
maybeByteIterable.byteLength);
|
||||
} else if (maybeByteIterable instanceof ArrayBuffer) {
|
||||
this._view = new Uint8Array(maybeByteIterable);
|
||||
} else if (typeof maybeByteIterable === 'string') {
|
||||
this._view = textEncoder.encode(maybeByteIterable);
|
||||
} else if (typeof maybeByteIterable === 'number') {
|
||||
this._view = new Uint8Array(maybeByteIterable);
|
||||
} else if (typeof maybeByteIterable.length === 'number') {
|
||||
this._view = Uint8Array.from(maybeByteIterable);
|
||||
} else {
|
||||
throw new TypeError("Attempt to initialize Bytes from unsupported value: " +
|
||||
maybeByteIterable);
|
||||
}
|
||||
}
|
||||
|
||||
get length(): number {
|
||||
return this._view.length;
|
||||
}
|
||||
|
||||
static from(x: any): Bytes {
|
||||
return new Bytes(x);
|
||||
}
|
||||
|
||||
static of(...bytes: number[]): Bytes {
|
||||
return new Bytes(Uint8Array.of(...bytes));
|
||||
}
|
||||
|
||||
static fromHex(s: string): Bytes {
|
||||
if (s.length & 1) throw new Error("Cannot decode odd-length hexadecimal string");
|
||||
const len = s.length >> 1;
|
||||
const result = new Bytes(len);
|
||||
for (let i = 0; i < len; i++) {
|
||||
result._view[i] =
|
||||
(unhexDigit(s.charCodeAt(i << 1)) << 4) | unhexDigit(s.charCodeAt((i << 1) + 1));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
static fromIO(io: string | BytesLike): string | Bytes {
|
||||
if (typeof io === 'string') return io;
|
||||
if (Bytes.isBytes(io)) return io;
|
||||
return new Bytes(io);
|
||||
}
|
||||
|
||||
static toIO(b : string | BytesLike): string | Uint8Array {
|
||||
if (typeof b === 'string') return b;
|
||||
if (Bytes.isBytes(b)) return b._view;
|
||||
return b;
|
||||
}
|
||||
|
||||
static concat = function (bss: BytesLike[]): Bytes {
|
||||
let len = 0;
|
||||
for (let i = 0; i < bss.length; i++) { len += underlying(bss[i]).length; }
|
||||
|
||||
const result = new Bytes(len);
|
||||
let index = 0;
|
||||
for (let i = 0; i < bss.length; i++) {
|
||||
const bs = underlying(bss[i]);
|
||||
result._view.set(bs, index);
|
||||
index += bs.length;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
get(index: number): number {
|
||||
return this._view[index];
|
||||
}
|
||||
|
||||
equals(other: any): boolean {
|
||||
if (!Bytes.isBytes(other)) return false;
|
||||
if (other.length !== this.length) return false;
|
||||
const va = this._view;
|
||||
const vb = other._view;
|
||||
for (let i = 0; i < va.length; i++) {
|
||||
if (va[i] !== vb[i]) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
hashCode(): number {
|
||||
// Immutable.js uses this function for strings.
|
||||
const v = this._view;
|
||||
let hash = 0;
|
||||
for (let i = 0; i < v.length; i++) {
|
||||
hash = ((31 * hash) + v[i]) | 0;
|
||||
}
|
||||
return hash;
|
||||
}
|
||||
|
||||
static compare(a: Bytes, b: Bytes): number {
|
||||
if (a < b) return -1;
|
||||
if (b < a) return 1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static decodeUtf8(bs: Bytes | Uint8Array): string {
|
||||
return textDecoder.decode(underlying(bs));
|
||||
}
|
||||
|
||||
fromUtf8(): string {
|
||||
return textDecoder.decode(this._view);
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.asPreservesText();
|
||||
}
|
||||
|
||||
[AsPreserve]<T = GenericEmbedded>(): Value<T> {
|
||||
return this;
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
return '#"' + this.__asciify() + '"';
|
||||
}
|
||||
|
||||
__asciify(): string {
|
||||
const pieces = [];
|
||||
const v = this._view;
|
||||
for (let i = 0; i < v.length; i++) {
|
||||
const b = v[i];
|
||||
if (b === 92 || b === 34) {
|
||||
pieces.push('\\' + String.fromCharCode(b));
|
||||
} else if (b >= 32 && b <= 126) {
|
||||
pieces.push(String.fromCharCode(b));
|
||||
} else {
|
||||
pieces.push('\\x' + hexDigit(b >> 4) + hexDigit(b & 15));
|
||||
}
|
||||
}
|
||||
return pieces.join('');
|
||||
}
|
||||
|
||||
toHex(): string {
|
||||
var nibbles = [];
|
||||
for (let i = 0; i < this.length; i++) {
|
||||
nibbles.push(hexDigit(this._view[i] >> 4));
|
||||
nibbles.push(hexDigit(this._view[i] & 15));
|
||||
}
|
||||
return nibbles.join('');
|
||||
}
|
||||
|
||||
[PreserveOn](encoder: Encoder<never>) {
|
||||
encoder.state.emitbyte(Tag.ByteString);
|
||||
encoder.state.varint(this.length);
|
||||
encoder.state.emitbytes(this._view);
|
||||
}
|
||||
|
||||
get [IsPreservesBytes](): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
static isBytes(x: any): x is Bytes {
|
||||
return !!x?.[IsPreservesBytes];
|
||||
}
|
||||
}
|
||||
|
||||
export function hexDigit(n: number): string {
|
||||
return '0123456789abcdef'[n];
|
||||
}
|
||||
|
||||
export function unhexDigit(asciiCode: number) {
|
||||
if (asciiCode >= 48 && asciiCode <= 57) return asciiCode - 48;
|
||||
if (asciiCode >= 97 && asciiCode <= 102) return asciiCode - 97 + 10;
|
||||
if (asciiCode >= 65 && asciiCode <= 70) return asciiCode - 65 + 10;
|
||||
throw new Error("Invalid hex digit: " + String.fromCharCode(asciiCode));
|
||||
}
|
||||
|
||||
export function underlying(b: Bytes | Uint8Array): Uint8Array {
|
||||
return (b instanceof Uint8Array) ? b : b._view;
|
||||
}
|
||||
|
||||
// Uint8Array / TypedArray methods
|
||||
|
||||
export interface Bytes {
|
||||
entries(): IterableIterator<[number, number]>;
|
||||
every(predicate: (value: number, index: number, array: Uint8Array) => unknown,
|
||||
thisArg?: any): boolean;
|
||||
find(predicate: (value: number, index: number, obj: Uint8Array) => boolean,
|
||||
thisArg?: any): number;
|
||||
findIndex(predicate: (value: number, index: number, obj: Uint8Array) => boolean,
|
||||
thisArg?: any): number;
|
||||
forEach(callbackfn: (value: number, index: number, array: Uint8Array) => void,
|
||||
thisArg?: any): void;
|
||||
includes(searchElement: number, fromIndex?: number): boolean;
|
||||
indexOf(searchElement: number, fromIndex?: number): number;
|
||||
join(separator?: string): string;
|
||||
keys(): IterableIterator<number>;
|
||||
lastIndexOf(searchElement: number, fromIndex?: number): number;
|
||||
reduce(callbackfn: (previousValue: number,
|
||||
currentValue: number,
|
||||
currentIndex: number,
|
||||
array: Uint8Array) => number,
|
||||
initialValue?: number): number;
|
||||
reduceRight(callbackfn: (previousValue: number,
|
||||
currentValue: number,
|
||||
currentIndex: number,
|
||||
array: Uint8Array) => number,
|
||||
initialValue?: number): number;
|
||||
some(predicate: (value: number, index: number, array: Uint8Array) => unknown,
|
||||
thisArg?: any): boolean;
|
||||
toLocaleString(): string;
|
||||
values(): IterableIterator<number>;
|
||||
|
||||
filter(predicate: (value: number, index: number, array: Uint8Array) => any,
|
||||
thisArg?: any): Bytes;
|
||||
map(callbackfn: (value: number, index: number, array: Uint8Array) => number,
|
||||
thisArg?: any): Bytes;
|
||||
slice(start?: number, end?: number): Bytes;
|
||||
subarray(begin?: number, end?: number): Bytes;
|
||||
|
||||
reverse(): Bytes;
|
||||
sort(compareFn?: (a: number, b: number) => number): Bytes;
|
||||
|
||||
[Symbol.iterator](): IterableIterator<number>;
|
||||
}
|
||||
|
||||
(function () {
|
||||
for (const k of `entries every find findIndex forEach includes indexOf join
|
||||
keys lastIndexOf reduce reduceRight some toLocaleString values`.split(/\s+/))
|
||||
{
|
||||
(Bytes as any).prototype[k] =
|
||||
function (...args: any[]) { return this._view[k](...args); };
|
||||
}
|
||||
|
||||
for (const k of `filter map slice subarray`.split(/\s+/))
|
||||
{
|
||||
(Bytes as any).prototype[k] =
|
||||
function (...args: any[]) { return new Bytes(this._view[k](...args)); };
|
||||
}
|
||||
|
||||
for (const k of `reverse sort`.split(/\s+/))
|
||||
{
|
||||
(Bytes as any).prototype[k] =
|
||||
function (...args: any[]) { return new Bytes(this._view.slice()[k](...args)); };
|
||||
}
|
||||
|
||||
Bytes.prototype[Symbol.iterator] = function () { return this._view[Symbol.iterator](); };
|
||||
})();
|
|
@ -0,0 +1,52 @@
|
|||
// Preserves Binary codec.
|
||||
|
||||
import { Position } from "./annotated";
|
||||
|
||||
export type ErrorType = 'DecodeError' | 'EncodeError' | 'ShortPacket';
|
||||
export const ErrorType = Symbol.for('ErrorType');
|
||||
|
||||
export abstract class PreservesCodecError {
|
||||
abstract get [ErrorType](): ErrorType;
|
||||
|
||||
static isCodecError(e: any, t: ErrorType): e is PreservesCodecError {
|
||||
return (e?.[ErrorType] === t);
|
||||
}
|
||||
}
|
||||
|
||||
export class DecodeError extends Error {
|
||||
readonly pos: Position | undefined;
|
||||
|
||||
get [ErrorType](): ErrorType { return 'DecodeError' }
|
||||
|
||||
constructor(message: string, pos?: Position) {
|
||||
super(message);
|
||||
this.pos = pos;
|
||||
}
|
||||
|
||||
static isDecodeError(e: any): e is DecodeError {
|
||||
return PreservesCodecError.isCodecError(e, 'DecodeError');
|
||||
}
|
||||
}
|
||||
|
||||
export class EncodeError extends Error {
|
||||
get [ErrorType](): ErrorType { return 'EncodeError' }
|
||||
|
||||
static isEncodeError(e: any): e is EncodeError {
|
||||
return PreservesCodecError.isCodecError(e, 'EncodeError');
|
||||
}
|
||||
|
||||
readonly irritant: any;
|
||||
|
||||
constructor(message: string, irritant: any) {
|
||||
super(message);
|
||||
this.irritant = irritant;
|
||||
}
|
||||
}
|
||||
|
||||
export class ShortPacket extends DecodeError {
|
||||
get [ErrorType](): ErrorType { return 'ShortPacket' }
|
||||
|
||||
static isShortPacket(e: any): e is ShortPacket {
|
||||
return PreservesCodecError.isCodecError(e, 'ShortPacket');
|
||||
}
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
import type { Compound, Value } from "./values";
|
||||
import type { GenericEmbedded } from "./embedded";
|
||||
import { Dictionary, Set } from "./dictionary";
|
||||
|
||||
export function isCompound<T = GenericEmbedded>(x: Value<T>): x is Compound<T>
|
||||
{
|
||||
return (Array.isArray(x) || Set.isSet(x) || Dictionary.isDictionary(x));
|
||||
}
|
|
@ -0,0 +1,21 @@
|
|||
export enum Tag {
|
||||
False = 0x80,
|
||||
True,
|
||||
Float,
|
||||
Double,
|
||||
End,
|
||||
Annotation,
|
||||
Embedded,
|
||||
|
||||
SmallInteger_lo = 0x90,
|
||||
MediumInteger_lo = 0xa0,
|
||||
|
||||
SignedInteger = 0xb0,
|
||||
String,
|
||||
ByteString,
|
||||
Symbol,
|
||||
Record,
|
||||
Sequence,
|
||||
Set,
|
||||
Dictionary,
|
||||
}
|
|
@ -0,0 +1,385 @@
|
|||
import { Annotated } from "./annotated";
|
||||
import { DecodeError, ShortPacket } from "./codec";
|
||||
import { Tag } from "./constants";
|
||||
import { Set, Dictionary } from "./dictionary";
|
||||
import { DoubleFloat, SingleFloat } from "./float";
|
||||
import { Record } from "./record";
|
||||
import { Bytes, BytesLike, underlying } from "./bytes";
|
||||
import { Value } from "./values";
|
||||
import { is } from "./is";
|
||||
import { embed, GenericEmbedded, Embedded, EmbeddedTypeDecode } from "./embedded";
|
||||
import { ReaderStateOptions } from "reader";
|
||||
|
||||
export interface DecoderOptions {
|
||||
includeAnnotations?: boolean;
|
||||
}
|
||||
|
||||
export interface DecoderEmbeddedOptions<T> extends DecoderOptions {
|
||||
embeddedDecode?: EmbeddedTypeDecode<T>;
|
||||
}
|
||||
|
||||
export interface TypedDecoder<T> {
|
||||
atEnd(): boolean;
|
||||
|
||||
mark(): any;
|
||||
restoreMark(m: any): void;
|
||||
|
||||
skip(): void;
|
||||
next(): Value<T>;
|
||||
withEmbeddedDecode<S, R>(
|
||||
embeddedDecode: EmbeddedTypeDecode<S>,
|
||||
body: (d: TypedDecoder<S>) => R): R;
|
||||
|
||||
nextBoolean(): boolean | undefined;
|
||||
nextFloat(): SingleFloat | undefined;
|
||||
nextDouble(): DoubleFloat | undefined;
|
||||
nextEmbedded(): Embedded<T> | undefined;
|
||||
nextSignedInteger(): number | undefined;
|
||||
nextString(): string | undefined;
|
||||
nextByteString(): Bytes | undefined;
|
||||
nextSymbol(): symbol | undefined;
|
||||
|
||||
openRecord(): boolean;
|
||||
openSequence(): boolean;
|
||||
openSet(): boolean;
|
||||
openDictionary(): boolean;
|
||||
|
||||
closeCompound(): boolean;
|
||||
}
|
||||
|
||||
export function asLiteral<T, E extends Exclude<Value<T>, Annotated<T>>>(
|
||||
actual: Value<T>,
|
||||
expected: E): E | undefined
|
||||
{
|
||||
return is(actual, expected) ? expected : void 0;
|
||||
}
|
||||
|
||||
/**
 * Low-level cursor over the binary Preserves encoding: a byte buffer plus a
 * read index, with primitives for reading single bytes, varints and
 * fixed-width integers. Higher-level structure is handled by Decoder.
 */
export class DecoderState {
    packet: Uint8Array;        // bytes being decoded; unconsumed data is from `index` onward
    index = 0;                 // read position within `packet`
    options: DecoderOptions;

    constructor(packet: BytesLike, options: DecoderOptions) {
        this.packet = underlying(packet);
        this.options = options;
    }

    // Whether decoded values should be wrapped so their annotations are retained.
    get includeAnnotations(): boolean {
        return this.options.includeAnnotations ?? false;
    }

    // Append `data` to the unconsumed input. If everything so far has been
    // consumed, just replace the buffer; otherwise concatenate the leftover
    // tail with the new bytes. Either way the cursor restarts at 0.
    write(data: BytesLike) {
        if (this.index === this.packet.length) {
            this.packet = underlying(data);
        } else {
            this.packet = Bytes.concat([this.packet.slice(this.index), data])._view;
        }
        this.index = 0;
    }

    atEnd(): boolean {
        return this.index >= this.packet.length;
    }

    // mark()/restoreMark() save and restore the cursor, enabling speculative reads.
    mark(): number {
        return this.index;
    }

    restoreMark(m: number): void {
        this.index = m;
    }

    // Run `body`; if it fails because input ran out (ShortPacket), rewind to
    // the starting position and return `short()` instead. Other exceptions
    // propagate unchanged.
    shortGuard<R>(body: () => R, short: () => R): R {
        if (this.atEnd()) return short();
        // ^ important somewhat-common case optimization - avoid the exception

        const start = this.mark();
        try {
            return body();
        } catch (e) {
            if (ShortPacket.isShortPacket(e)) {
                this.restoreMark(start);
                return short();
            }
            throw e;
        }
    }

    // Consume and return one byte; throws ShortPacket if none remain.
    nextbyte(): number {
        if (this.atEnd()) throw new ShortPacket("Short packet");
        return this.packet[this.index++];
    }

    // Consume `n` bytes, returned as a zero-copy view over the packet buffer.
    nextbytes(n: number): DataView {
        const start = this.index;
        this.index += n;
        if (this.index > this.packet.length) throw new ShortPacket("Short packet");
        // ^ NOTE: greater-than, not greater-than-or-equal-to - this makes atEnd() inappropriate
        return new DataView(this.packet.buffer, this.packet.byteOffset + start, n);
    }

    // Read a base-128 varint: 7 data bits per byte, least-significant group
    // first, high bit set on every byte except the last.
    varint(): number {
        // TODO: Bignums :-/
        const v = this.nextbyte();
        if (v < 128) return v;
        return (this.varint() << 7) + (v - 128);
    }

    // True (consuming the byte) when the next byte is the compound End
    // marker; otherwise false with the cursor left where it was.
    peekend(): boolean {
        return (this.nextbyte() === Tag.End) || (this.index--, false);
    }

    // Read an `n`-byte big-endian two's-complement signed integer.
    nextint(n: number): number {
        // TODO: Bignums :-/
        if (n === 0) return 0;
        let acc = this.nextbyte();
        if (acc & 0x80) acc -= 256;  // sign-extend the leading byte
        for (let i = 1; i < n; i++) acc = (acc * 256) + this.nextbyte();
        return acc;
    }

    // Interpret `tag` as a SmallInteger (value packed into the tag itself) or
    // MediumInteger (byte count packed into the tag, payload follows);
    // undefined when `tag` is neither.
    nextSmallOrMediumInteger(tag: number): number | undefined {
        if (tag >= Tag.SmallInteger_lo && tag <= Tag.SmallInteger_lo + 15) {
            const v = tag - Tag.SmallInteger_lo;
            return v > 12 ? v - 16 : v;  // offsets 13..15 encode -3..-1
        }
        if (tag >= Tag.MediumInteger_lo && tag <= Tag.MediumInteger_lo + 15) {
            const n = tag - Tag.MediumInteger_lo;
            return this.nextint(n + 1);
        }
        return void 0;
    }

    // Wrap a freshly-decoded value in an Annotated carrier when annotations
    // are being kept; otherwise pass it through unchanged.
    wrap<T>(v: Value<T>): Value<T> {
        return this.includeAnnotations ? new Annotated(v) : v;
    }

    // Record annotation `a` at the front of v's annotation list, when
    // annotations are being kept.
    unshiftAnnotation<T>(a: Value<T>, v: Annotated<T>): Annotated<T> {
        if (this.includeAnnotations) {
            v.annotations.unshift(a);
        }
        return v;
    }
}
|
||||
|
||||
export const neverEmbeddedTypeDecode: EmbeddedTypeDecode<never> = {
|
||||
decode(_s: DecoderState): never {
|
||||
throw new Error("Embeddeds not permitted at this point in Preserves document");
|
||||
},
|
||||
|
||||
fromValue(_v: Value<GenericEmbedded>, _options: ReaderStateOptions): never {
|
||||
throw new Error("Embeddeds not permitted at this point in Preserves document");
|
||||
},
|
||||
};
|
||||
|
||||
/**
 * Decoder for the binary Preserves syntax, parameterized over the embedded
 * value type T. `next()` reads whole Values; the typed accessors
 * (nextBoolean, nextString, openRecord, ...) implement TypedDecoder<T>,
 * yielding undefined/false when the upcoming item is not of the requested
 * kind.
 */
export class Decoder<T = never> implements TypedDecoder<T> {
    state: DecoderState;                     // input buffer + cursor (may be shared with other decoders)
    embeddedDecode: EmbeddedTypeDecode<T>;   // strategy for Tag.Embedded payloads

    // Either wrap an existing DecoderState (sharing its cursor — see
    // withEmbeddedDecode) or start a fresh one over `packet`.
    constructor(state: DecoderState, embeddedDecode?: EmbeddedTypeDecode<T>);
    constructor(packet?: BytesLike, options?: DecoderEmbeddedOptions<T>);
    constructor(
        packet_or_state: (DecoderState | BytesLike) = new Uint8Array(0),
        options_or_embeddedDecode?: (DecoderEmbeddedOptions<T> | EmbeddedTypeDecode<T>))
    {
        if (packet_or_state instanceof DecoderState) {
            this.state = packet_or_state;
            this.embeddedDecode = (options_or_embeddedDecode as EmbeddedTypeDecode<T>) ?? neverEmbeddedTypeDecode;
        } else {
            const options = (options_or_embeddedDecode as DecoderEmbeddedOptions<T>) ?? {};
            this.state = new DecoderState(packet_or_state, options);
            this.embeddedDecode = options.embeddedDecode ?? neverEmbeddedTypeDecode;
        }
    }

    /** Feed additional input bytes to the underlying state. */
    write(data: BytesLike) {
        this.state.write(data);
    }

    /** Read Values up to (and consuming) the compound End marker. */
    nextvalues(): Value<T>[] {
        const result = [];
        while (!this.state.peekend()) result.push(this.next());
        return result;
    }

    /** Pair a flat [k1, v1, k2, v2, ...] array up into a Dictionary. */
    static dictionaryFromArray<T>(vs: Value<T>[]): Dictionary<T> {
        const d = new Dictionary<T>();
        if (vs.length % 2) throw new DecodeError("Missing dictionary value");
        for (let i = 0; i < vs.length; i += 2) {
            d.set(vs[i], vs[i+1]);
        }
        return d;
    }

    /**
     * Decode and return the next complete Value. Throws ShortPacket when the
     * input is truncated mid-value, DecodeError on malformed input.
     */
    next(): Value<T> {
        const tag = this.state.nextbyte();
        switch (tag) {
            case Tag.False: return this.state.wrap<T>(false);
            case Tag.True: return this.state.wrap<T>(true);
            // Floats and doubles are IEEE 754, big-endian on the wire.
            case Tag.Float: return this.state.wrap<T>(new SingleFloat(this.state.nextbytes(4).getFloat32(0, false)));
            case Tag.Double: return this.state.wrap<T>(new DoubleFloat(this.state.nextbytes(8).getFloat64(0, false)));
            case Tag.End: throw new DecodeError("Unexpected Compound end marker");
            case Tag.Annotation: {
                // The annotation Value precedes the annotated item.
                const a = this.next();
                const v = this.next() as Annotated<T>;
                return this.state.unshiftAnnotation(a, v);
            }
            case Tag.Embedded: return this.state.wrap<T>(embed(this.embeddedDecode.decode(this.state)));
            // Atoms framed by a varint byte-length:
            case Tag.SignedInteger: return this.state.wrap<T>(this.state.nextint(this.state.varint()));
            case Tag.String: return this.state.wrap<T>(Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8());
            case Tag.ByteString: return this.state.wrap<T>(Bytes.from(this.state.nextbytes(this.state.varint())));
            case Tag.Symbol: return this.state.wrap<T>(Symbol.for(Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8()));
            case Tag.Record: {
                // First element is the record label, the rest are its fields.
                const vs = this.nextvalues();
                if (vs.length === 0) throw new DecodeError("Too few elements in encoded record");
                return this.state.wrap<T>(Record(vs[0], vs.slice(1)));
            }
            case Tag.Sequence: return this.state.wrap<T>(this.nextvalues());
            case Tag.Set: return this.state.wrap<T>(new Set(this.nextvalues()));
            case Tag.Dictionary: return this.state.wrap<T>(Decoder.dictionaryFromArray(this.nextvalues()));
            default: {
                // Remaining tag space: small/medium integers packed into the tag byte.
                const v = this.state.nextSmallOrMediumInteger(tag);
                if (v === void 0) {
                    throw new DecodeError("Unsupported Preserves tag: " + tag);
                }
                return this.state.wrap<T>(v);
            }
        }
    }

    /** Like next(), but yields undefined (with cursor restored) on truncated input. */
    try_next(): Value<T> | undefined {
        return this.state.shortGuard(() => this.next(), () => void 0);
    }

    atEnd(): boolean {
        return this.state.atEnd();
    }

    mark(): any {
        return this.state.mark();
    }

    restoreMark(m: any): void {
        this.state.restoreMark(m);
    }

    /** Skip over one complete Value. */
    skip(): void {
        // TODO: be more efficient
        this.next();
    }

    /** Run `body` with a decoder sharing this cursor but using a different embedded codec. */
    withEmbeddedDecode<S, R>(
        embeddedDecode: EmbeddedTypeDecode<S>,
        body: (d: TypedDecoder<S>) => R): R
    {
        return body(new Decoder(this.state, embeddedDecode));
    }

    // Consume a leading annotation marker plus its annotation Value, if
    // present. NOTE(review): strips a single leading annotation only —
    // confirm whether multiply-annotated items need a loop here.
    skipAnnotations(): void {
        if (!this.state.atEnd() && this.state.packet[this.state.index] === Tag.Annotation) {
            this.state.index++;
            this.skip();
        }
    }

    // The typed accessors below consume the item's tag byte; on a tag
    // mismatch they return undefined with that byte already consumed.
    nextBoolean(): boolean | undefined {
        this.skipAnnotations();
        switch (this.state.nextbyte()) {
            case Tag.False: return false;
            case Tag.True: return true;
            default: return void 0;
        }
    }

    nextFloat(): SingleFloat | undefined {
        this.skipAnnotations();
        switch (this.state.nextbyte()) {
            case Tag.Float: return new SingleFloat(this.state.nextbytes(4).getFloat32(0, false));
            default: return void 0;
        }
    }

    nextDouble(): DoubleFloat | undefined {
        this.skipAnnotations();
        switch (this.state.nextbyte()) {
            case Tag.Double: return new DoubleFloat(this.state.nextbytes(8).getFloat64(0, false));
            default: return void 0;
        }
    }

    nextEmbedded(): Embedded<T> | undefined {
        this.skipAnnotations();
        switch (this.state.nextbyte()) {
            case Tag.Embedded: return embed(this.embeddedDecode.decode(this.state));
            default: return void 0;
        }
    }

    nextSignedInteger(): number | undefined {
        this.skipAnnotations();
        const b = this.state.nextbyte();
        switch (b) {
            case Tag.SignedInteger: return this.state.nextint(this.state.varint());
            // Also accept the packed small/medium integer tag forms.
            default: return this.state.nextSmallOrMediumInteger(b);
        }
    }

    nextString(): string | undefined {
        this.skipAnnotations();
        switch (this.state.nextbyte()) {
            case Tag.String: return Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8();
            default: return void 0;
        }
    }

    nextByteString(): Bytes | undefined {
        this.skipAnnotations();
        switch (this.state.nextbyte()) {
            case Tag.ByteString: return Bytes.from(this.state.nextbytes(this.state.varint()));
            default: return void 0;
        }
    }

    nextSymbol(): symbol | undefined {
        this.skipAnnotations();
        switch (this.state.nextbyte()) {
            case Tag.Symbol:
                // Symbols are interned globally via Symbol.for.
                return Symbol.for(Bytes.from(this.state.nextbytes(this.state.varint())).fromUtf8());
            default:
                return void 0;
        }
    }

    // The open* accessors consume the compound's opening tag on success;
    // on mismatch they leave the cursor on the unexpected byte and return false.
    openRecord(): boolean {
        this.skipAnnotations();
        return (this.state.nextbyte() === Tag.Record) || (this.state.index--, false);
    }

    openSequence(): boolean {
        this.skipAnnotations();
        return (this.state.nextbyte() === Tag.Sequence) || (this.state.index--, false);
    }

    openSet(): boolean {
        this.skipAnnotations();
        return (this.state.nextbyte() === Tag.Set) || (this.state.index--, false);
    }

    openDictionary(): boolean {
        this.skipAnnotations();
        return (this.state.nextbyte() === Tag.Dictionary) || (this.state.index--, false);
    }

    // True (consuming the byte) at a compound's End marker.
    closeCompound(): boolean {
        return this.state.peekend();
    }
}
|
||||
|
||||
export function decode<T>(bs: BytesLike, options: DecoderEmbeddedOptions<T> = {}): Value<T> {
|
||||
return new Decoder(bs, options).next();
|
||||
}
|
||||
|
||||
export function decodeWithAnnotations<T>(bs: BytesLike,
|
||||
options: DecoderEmbeddedOptions<T> = {}): Annotated<T> {
|
||||
return decode(bs, { ... options, includeAnnotations: true }) as Annotated<T>;
|
||||
}
|
|
@ -0,0 +1,137 @@
|
|||
import { Encoder, canonicalEncode, canonicalString } from "./encoder";
|
||||
import { Tag } from "./constants";
|
||||
import { FlexMap, FlexSet, _iterMap } from "./flex";
|
||||
import { PreserveOn } from "./symbols";
|
||||
import { stringify } from "./text";
|
||||
import { Value } from "./values";
|
||||
import { Bytes } from './bytes';
|
||||
import { GenericEmbedded } from "./embedded";
|
||||
|
||||
// Discriminator naming the two collection flavours branded below.
export type DictionaryType = 'Dictionary' | 'Set';
// Well-known symbol under which instances expose their flavour; the is*
// static predicates test this property.
export const DictionaryType = Symbol.for('DictionaryType');
|
||||
|
||||
/**
 * A Preserves dictionary: a FlexMap whose keys are identified by their
 * canonical Preserves encoding, so structurally-equal keys coincide.
 * K is the key type, V the value type, T the embedded type of the keys.
 */
export class KeyedDictionary<K extends Value<T>, V, T = GenericEmbedded> extends FlexMap<K, V> {
    // Brand consulted by isKeyedDictionary / Dictionary.isDictionary.
    get [DictionaryType](): DictionaryType {
        return 'Dictionary';
    }

    static isKeyedDictionary<K extends Value<T>, V, T = GenericEmbedded>(x: any): x is KeyedDictionary<K, V, T> {
        return x?.[DictionaryType] === 'Dictionary';
    }

    constructor(items?: readonly [K, V][]);
    constructor(items?: Iterable<readonly [K, V]>);
    constructor(items?: Iterable<readonly [K, V]>) {
        // Keys are canonicalized via their canonical Preserves encoding.
        super(canonicalString, items);
    }

    /** Build a new dictionary by transforming each [key, value] entry with `f`. */
    mapEntries<W, S extends Value<R>, R = GenericEmbedded>(f: (entry: [K, V]) => [S, W]): KeyedDictionary<S, W, R> {
        const result = new KeyedDictionary<S, W, R>();
        for (let oldEntry of this.entries()) {
            const newEntry = f(oldEntry);
            result.set(newEntry[0], newEntry[1])
        }
        return result;
    }

    /** Render in Preserves text syntax, e.g. `{1: "a", 2: "b"}`. */
    asPreservesText(): string {
        return '{' +
            Array.from(_iterMap(this.entries(), ([k, v]) =>
                stringify(k) + ': ' + stringify(v))).join(', ') +
            '}';
    }

    clone(): KeyedDictionary<K, V, T> {
        return new KeyedDictionary(this);
    }

    toString(): string {
        return this.asPreservesText();
    }

    get [Symbol.toStringTag]() { return 'Dictionary'; }

    // Serialize as a Dictionary compound. In canonical mode, entries are
    // emitted in ascending order of their canonically-encoded keys.
    [PreserveOn](encoder: Encoder<T>) {
        if (encoder.canonical) {
            const entries = Array.from(this);
            // Sort indices by canonical key encoding, then emit in that order.
            const pieces = entries.map<[Bytes, number]>(([k, _v], i) => [canonicalEncode(k), i]);
            pieces.sort((a, b) => Bytes.compare(a[0], b[0]));
            encoder.state.emitbyte(Tag.Dictionary);
            pieces.forEach(([_encodedKey, i]) => {
                const [k, v] = entries[i];
                encoder.push(k);
                encoder.push(v as unknown as Value<T>); // Suuuuuuuper unsound
            });
            encoder.state.emitbyte(Tag.End);
        } else {
            encoder.state.emitbyte(Tag.Dictionary);
            this.forEach((v, k) => {
                encoder.push(k);
                encoder.push(v as unknown as Value<T>); // Suuuuuuuper unsound
            });
            encoder.state.emitbyte(Tag.End);
        }
    }
}
|
||||
|
||||
export class Dictionary<T = GenericEmbedded, V = Value<T>> extends KeyedDictionary<Value<T>, V, T> {
|
||||
static isDictionary<T = GenericEmbedded, V = Value<T>>(x: any): x is Dictionary<T, V> {
|
||||
return x?.[DictionaryType] === 'Dictionary';
|
||||
}
|
||||
}
|
||||
|
||||
export class KeyedSet<K extends Value<T>, T = GenericEmbedded> extends FlexSet<K> {
|
||||
get [DictionaryType](): DictionaryType {
|
||||
return 'Set';
|
||||
}
|
||||
|
||||
static isKeyedSet<K extends Value<T>, T = GenericEmbedded>(x: any): x is KeyedSet<K, T> {
|
||||
return x?.[DictionaryType] === 'Set';
|
||||
}
|
||||
|
||||
constructor(items?: Iterable<K>) {
|
||||
super(canonicalString, items);
|
||||
}
|
||||
|
||||
map<S extends Value<R>, R = GenericEmbedded>(f: (value: K) => S): KeyedSet<S, R> {
|
||||
return new KeyedSet(_iterMap(this[Symbol.iterator](), f));
|
||||
}
|
||||
|
||||
filter(f: (value: K) => boolean): KeyedSet<K, T> {
|
||||
const result = new KeyedSet<K, T>();
|
||||
for (let k of this) if (f(k)) result.add(k);
|
||||
return result;
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.asPreservesText();
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
return '#{' +
|
||||
Array.from(_iterMap(this.values(), stringify)).join(', ') +
|
||||
'}';
|
||||
}
|
||||
|
||||
clone(): KeyedSet<K, T> {
|
||||
return new KeyedSet(this);
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() { return 'Set'; }
|
||||
|
||||
[PreserveOn](encoder: Encoder<T>) {
|
||||
if (encoder.canonical) {
|
||||
const pieces = Array.from(this).map<[Bytes, K]>(k => [canonicalEncode(k), k]);
|
||||
pieces.sort((a, b) => Bytes.compare(a[0], b[0]));
|
||||
encoder.encodevalues(Tag.Set, pieces.map(e => e[1]));
|
||||
} else {
|
||||
encoder.encodevalues(Tag.Set, this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class Set<T = GenericEmbedded> extends KeyedSet<Value<T>, T> {
|
||||
static isSet<T = GenericEmbedded>(x: any): x is Set<T> {
|
||||
return x?.[DictionaryType] === 'Set';
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
import type { EncoderState } from "./encoder";
|
||||
import type { DecoderState } from "./decoder";
|
||||
import type { Value } from "./values";
|
||||
import { ReaderStateOptions } from "./reader";
|
||||
|
||||
// Strategy for writing embedded (host-language) values of type T:
// `encode` emits binary form directly into an EncoderState; `toValue`
// projects the embedded value into a plain Preserves Value for generic
// processing.
export type EmbeddedTypeEncode<T> = {
    encode(s: EncoderState, v: T): void;
    toValue(v: T): Value<GenericEmbedded>;
}

// Strategy for reading embedded values of type T: `decode` consumes binary
// input from a DecoderState; `fromValue` recovers a T from a generic
// Preserves Value.
export type EmbeddedTypeDecode<T> = {
    decode(s: DecoderState): T;
    fromValue(v: Value<GenericEmbedded>, options: ReaderStateOptions): T;
}

// A complete embedded-value codec: both directions.
export type EmbeddedType<T> = EmbeddedTypeEncode<T> & EmbeddedTypeDecode<T>;
|
||||
|
||||
export class Embedded<T> {
|
||||
embeddedValue: T;
|
||||
|
||||
constructor(embeddedValue: T) {
|
||||
this.embeddedValue = embeddedValue;
|
||||
}
|
||||
|
||||
equals(other: any, is: (a: any, b: any) => boolean) {
|
||||
return isEmbedded<T>(other) && is(this.embeddedValue, other.embeddedValue);
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
return '#!' + (this.embeddedValue as any).asPreservesText();
|
||||
}
|
||||
}
|
||||
|
||||
export function embed<T>(embeddedValue: T): Embedded<T> {
|
||||
return new Embedded(embeddedValue);
|
||||
}
|
||||
|
||||
export function isEmbedded<T>(v: Value<T>): v is Embedded<T> {
|
||||
return typeof v === 'object' && 'embeddedValue' in v;
|
||||
}
|
||||
|
||||
export class GenericEmbedded {
|
||||
generic: Value;
|
||||
|
||||
constructor(generic: Value) {
|
||||
this.generic = generic;
|
||||
}
|
||||
|
||||
equals(other: any, is: (a: any, b: any) => boolean) {
|
||||
return typeof other === 'object' && 'generic' in other && is(this.generic, other.generic);
|
||||
}
|
||||
|
||||
asPreservesText(): string {
|
||||
return this.generic.asPreservesText();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
import { GenericEmbedded, EmbeddedType, EmbeddedTypeDecode, EmbeddedTypeEncode } from "./embedded";
|
||||
import { Encoder, EncoderState, identityEmbeddedTypeEncode } from "./encoder";
|
||||
import { genericEmbeddedTypeDecode, ReaderStateOptions } from "./reader";
|
||||
import { Value } from "./values";
|
||||
import { DecoderState, neverEmbeddedTypeDecode } from "./decoder";
|
||||
|
||||
export const genericEmbeddedTypeEncode: EmbeddedTypeEncode<GenericEmbedded> = {
|
||||
encode(s: EncoderState, v: GenericEmbedded): void {
|
||||
new Encoder(s, this).push(v.generic);
|
||||
},
|
||||
|
||||
toValue(v: GenericEmbedded): Value<GenericEmbedded> {
|
||||
return v.generic;
|
||||
}
|
||||
};
|
||||
|
||||
export const genericEmbeddedType: EmbeddedType<GenericEmbedded> =
|
||||
Object.assign({},
|
||||
genericEmbeddedTypeDecode,
|
||||
genericEmbeddedTypeEncode);
|
||||
|
||||
export const neverEmbeddedTypeEncode: EmbeddedTypeEncode<never> = {
|
||||
encode(_s: EncoderState, _v: never): void {
|
||||
throw new Error("Embeddeds not permitted encoding Preserves document");
|
||||
},
|
||||
|
||||
toValue(_v: never): Value<GenericEmbedded> {
|
||||
throw new Error("Embeddeds not permitted encoding Preserves document");
|
||||
}
|
||||
};
|
||||
|
||||
export const neverEmbeddedType: EmbeddedType<never> =
|
||||
Object.assign({},
|
||||
neverEmbeddedTypeDecode,
|
||||
neverEmbeddedTypeEncode);
|
||||
|
||||
export const identityEmbeddedTypeDecode: EmbeddedTypeDecode<any> = {
|
||||
decode(_s: DecoderState): any {
|
||||
throw new Error("Cannot decode identityEmbeddedType");
|
||||
},
|
||||
|
||||
fromValue(_v: Value<GenericEmbedded>, _options: ReaderStateOptions): any {
|
||||
throw new Error("Cannot decode identityEmbeddedType");
|
||||
},
|
||||
};
|
||||
|
||||
export const identityEmbeddedType: EmbeddedType<any> =
|
||||
Object.assign({},
|
||||
identityEmbeddedTypeDecode,
|
||||
identityEmbeddedTypeEncode);
|
|
@ -0,0 +1,299 @@
|
|||
import { Tag } from "./constants";
|
||||
import { Bytes } from "./bytes";
|
||||
import { Value } from "./values";
|
||||
import { PreserveOn } from "./symbols";
|
||||
import { EncodeError } from "./codec";
|
||||
import { Record, Tuple } from "./record";
|
||||
import { GenericEmbedded, EmbeddedTypeEncode } from "./embedded";
|
||||
|
||||
// Anything the Encoder accepts: a plain Value, an object implementing the
// PreserveOn hook, an iterable of Values (emitted as a Sequence), or a raw
// binary view (emitted as a ByteString). See Encoder.push for the dispatch.
export type Encodable<T> =
    Value<T> | Preservable<T> | Iterable<Value<T>> | ArrayBufferView;

// Protocol hook: an object serializes itself by writing to the given Encoder.
export interface Preservable<T> {
    [PreserveOn](encoder: Encoder<T>): void;
}
|
||||
|
||||
export function isPreservable<T>(v: any): v is Preservable<T> {
|
||||
return typeof v === 'object' && v !== null && typeof v[PreserveOn] === 'function';
|
||||
}
|
||||
|
||||
// Encoder configuration. `canonical` selects canonical form (defaults to
// true in EncoderState); `includeAnnotations` controls whether annotations
// are emitted (defaults to the opposite of `canonical`).
export interface EncoderOptions {
    canonical?: boolean;
    includeAnnotations?: boolean;
}

// EncoderOptions plus the strategy used to serialize embedded values.
export interface EncoderEmbeddedOptions<T> extends EncoderOptions {
    embeddedEncode?: EmbeddedTypeEncode<T>;
}
|
||||
|
||||
export function asLatin1(bs: Uint8Array): string {
|
||||
return String.fromCharCode.apply(null, bs as any as number[]);
|
||||
}
|
||||
|
||||
function isIterable<T>(v: any): v is Iterable<T> {
|
||||
return typeof v === 'object' && v !== null && typeof v[Symbol.iterator] === 'function';
|
||||
}
|
||||
|
||||
let _nextId = 0;
|
||||
const _registry = new WeakMap<object, number>();
|
||||
export function embeddedId(v: any): number {
|
||||
let id = _registry.get(v);
|
||||
if (id === void 0) {
|
||||
id = _nextId++;
|
||||
_registry.set(v, id);
|
||||
}
|
||||
return id;
|
||||
}
|
||||
|
||||
export const identityEmbeddedTypeEncode: EmbeddedTypeEncode<any> = {
|
||||
encode(s: EncoderState, v: any): void {
|
||||
new Encoder(s, this).push(embeddedId(v));
|
||||
},
|
||||
|
||||
toValue(v: any): Value<GenericEmbedded> {
|
||||
return embeddedId(v);
|
||||
}
|
||||
};
|
||||
|
||||
export class EncoderState {
|
||||
chunks: Array<Uint8Array>;
|
||||
view: DataView;
|
||||
index: number;
|
||||
options: EncoderOptions;
|
||||
|
||||
constructor(options: EncoderOptions) {
|
||||
this.chunks = [];
|
||||
this.view = new DataView(new ArrayBuffer(256));
|
||||
this.index = 0;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
get canonical(): boolean {
|
||||
return this.options.canonical ?? true;
|
||||
}
|
||||
|
||||
get includeAnnotations(): boolean {
|
||||
return this.options.includeAnnotations ?? !this.canonical;
|
||||
}
|
||||
|
||||
contents(): Bytes {
|
||||
if (this.chunks.length === 0) {
|
||||
const resultLength = this.index;
|
||||
this.index = 0;
|
||||
return new Bytes(this.view.buffer.slice(0, resultLength));
|
||||
} else {
|
||||
this.rotatebuffer(4096);
|
||||
return Bytes.concat(this.chunks);
|
||||
}
|
||||
}
|
||||
|
||||
/* Like contents(), but hands back a string containing binary data "encoded" via latin-1 */
|
||||
contentsString(): string {
|
||||
if (this.chunks.length === 0) {
|
||||
const s = asLatin1(new Uint8Array(this.view.buffer, 0, this.index));
|
||||
this.index = 0;
|
||||
return s;
|
||||
} else {
|
||||
this.rotatebuffer(4096);
|
||||
return this.chunks.map(asLatin1).join('');
|
||||
}
|
||||
}
|
||||
|
||||
rotatebuffer(size: number) {
|
||||
this.chunks.push(new Uint8Array(this.view.buffer, 0, this.index));
|
||||
this.view = new DataView(new ArrayBuffer(size));
|
||||
this.index = 0;
|
||||
}
|
||||
|
||||
makeroom(amount: number) {
|
||||
if (this.index + amount > this.view.byteLength) {
|
||||
this.rotatebuffer(amount + 4096);
|
||||
}
|
||||
}
|
||||
|
||||
emitbyte(b: number) {
|
||||
this.makeroom(1);
|
||||
this.view.setUint8(this.index++, b);
|
||||
}
|
||||
|
||||
emitbytes(bs: Uint8Array) {
|
||||
this.makeroom(bs.length);
|
||||
(new Uint8Array(this.view.buffer)).set(bs, this.index);
|
||||
this.index += bs.length;
|
||||
}
|
||||
|
||||
varint(v: number) {
|
||||
while (v >= 128) {
|
||||
this.emitbyte((v % 128) + 128);
|
||||
v = Math.floor(v / 128);
|
||||
}
|
||||
this.emitbyte(v);
|
||||
}
|
||||
|
||||
encodeint(v: number) {
|
||||
// TODO: Bignums :-/
|
||||
const plain_bitcount = Math.floor(Math.log2(v > 0 ? v : -(1 + v))) + 1;
|
||||
const signed_bitcount = plain_bitcount + 1;
|
||||
const bytecount = (signed_bitcount + 7) >> 3;
|
||||
if (bytecount <= 16) {
|
||||
this.emitbyte(Tag.MediumInteger_lo + bytecount - 1);
|
||||
} else {
|
||||
this.emitbyte(Tag.SignedInteger);
|
||||
this.varint(bytecount);
|
||||
}
|
||||
const enc = (n: number, x: number) => {
|
||||
if (n > 0) {
|
||||
enc(n - 1, Math.floor(x / 256));
|
||||
this.emitbyte(x & 255);
|
||||
}
|
||||
};
|
||||
enc(bytecount, v);
|
||||
}
|
||||
|
||||
encodebytes(tag: Tag, bs: Uint8Array) {
|
||||
this.emitbyte(tag);
|
||||
this.varint(bs.length);
|
||||
this.emitbytes(bs);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Serializer for the binary Preserves syntax, parameterized over the
 * embedded value type T. Values are pushed via push(); accumulated output is
 * retrieved with contents()/contentsString().
 */
export class Encoder<T = object> {
    state: EncoderState;                     // output buffer (may be shared between encoders)
    embeddedEncode: EmbeddedTypeEncode<T>;   // strategy for embedded values

    // Either start a fresh EncoderState from options, or wrap an existing
    // one (sharing its buffer — see withEmbeddedEncode).
    constructor(options: EncoderEmbeddedOptions<T>);
    constructor(state: EncoderState, embeddedEncode?: EmbeddedTypeEncode<T>);
    constructor(
        state_or_options: (EncoderState | EncoderEmbeddedOptions<T>) = {},
        embeddedEncode?: EmbeddedTypeEncode<T>)
    {
        if (state_or_options instanceof EncoderState) {
            this.state = state_or_options;
            this.embeddedEncode = embeddedEncode ?? identityEmbeddedTypeEncode;
        } else {
            this.state = new EncoderState(state_or_options);
            this.embeddedEncode = state_or_options.embeddedEncode ?? identityEmbeddedTypeEncode;
        }
    }

    /** Run `body` with an encoder sharing this buffer but a different embedded codec. */
    withEmbeddedEncode<S>(
        embeddedEncode: EmbeddedTypeEncode<S>,
        body: (e: Encoder<S>) => void): this
    {
        body(new Encoder(this.state, embeddedEncode));
        return this;
    }

    get canonical(): boolean {
        return this.state.canonical;
    }

    get includeAnnotations(): boolean {
        return this.state.includeAnnotations;
    }

    /** Retrieve (and clear) the bytes encoded so far. */
    contents(): Bytes {
        return this.state.contents();
    }

    /** Retrieve (and clear) the output as a latin-1 "binary" string. */
    contentsString(): string {
        return this.state.contentsString();
    }

    // Emit a compound: opening tag, each item, then the End marker.
    encodevalues(tag: Tag, items: Iterable<Value<T>>) {
        this.state.emitbyte(tag);
        for (let i of items) { this.push(i); }
        this.state.emitbyte(Tag.End);
    }

    /**
     * Serialize one Encodable. Dispatches on the runtime shape of `v`;
     * anything not otherwise matched is treated as an embedded value.
     */
    push(v: Encodable<T>) {
        // The two isPreservable branches are identical at runtime; the
        // duplication exists to satisfy the type checker for both T and never.
        if (isPreservable<never>(v)) {
            v[PreserveOn](this as unknown as Encoder<never>);
        }
        else if (isPreservable<T>(v)) {
            v[PreserveOn](this);
        }
        else if (typeof v === 'boolean') {
            this.state.emitbyte(v ? Tag.True : Tag.False);
        }
        else if (typeof v === 'number') {
            if (v >= -3 && v <= 12) {
                // Small integers pack into the tag byte; (v + 16) & 0xf maps
                // -3..-1 to offsets 13..15.
                this.state.emitbyte(Tag.SmallInteger_lo + ((v + 16) & 0xf));
            } else {
                this.state.encodeint(v);
            }
        }
        else if (typeof v === 'string') {
            this.state.encodebytes(Tag.String, new Bytes(v)._view);
        }
        else if (typeof v === 'symbol') {
            // Only globally-interned symbols (Symbol.for) round-trip.
            const key = Symbol.keyFor(v);
            if (key === void 0) throw new EncodeError("Cannot preserve non-global Symbol", v);
            this.state.encodebytes(Tag.Symbol, new Bytes(key)._view);
        }
        else if (ArrayBuffer.isView(v)) {
            if (v instanceof Uint8Array) {
                this.state.encodebytes(Tag.ByteString, v);
            } else {
                // Any other typed-array/DataView: reinterpret its bytes.
                const bs = new Uint8Array(v.buffer, v.byteOffset, v.byteLength);
                this.state.encodebytes(Tag.ByteString, bs);
            }
        }
        else if (Record.isRecord<Value<T>, Tuple<Value<T>>, T>(v)) {
            // Record: label first, then the fields, then End.
            this.state.emitbyte(Tag.Record);
            this.push(v.label);
            for (let i of v) { this.push(i); }
            this.state.emitbyte(Tag.End);
        }
        else if (Array.isArray(v)) {
            this.encodevalues(Tag.Sequence, v);
        }
        else if (isIterable<Value<T>>(v)) {
            this.encodevalues(Tag.Sequence, v as Iterable<Value<T>>);
        }
        else {
            // Fallback: treat as an embedded value carrier.
            this.state.emitbyte(Tag.Embedded);
            this.embeddedEncode.encode(this.state, v.embeddedValue);
        }
        return this; // for chaining
    }
}
|
||||
|
||||
export function encode<T>(
|
||||
v: Encodable<T>,
|
||||
options: EncoderEmbeddedOptions<T> = {}): Bytes
|
||||
{
|
||||
return new Encoder(options).push(v).contents();
|
||||
}
|
||||
|
||||
const _canonicalEncoder = new Encoder({ canonical: true });
|
||||
let _usingCanonicalEncoder = false;
|
||||
|
||||
export function canonicalEncode(v: Encodable<never>, options?: EncoderEmbeddedOptions<never>): Bytes;
|
||||
export function canonicalEncode(v: Encodable<any>, options?: EncoderEmbeddedOptions<any>): Bytes;
|
||||
export function canonicalEncode(v: any, options?: EncoderEmbeddedOptions<any>): Bytes {
|
||||
if (options === void 0 && !_usingCanonicalEncoder) {
|
||||
_usingCanonicalEncoder = true;
|
||||
const bs = _canonicalEncoder.push(v).contents();
|
||||
_usingCanonicalEncoder = false;
|
||||
return bs;
|
||||
} else {
|
||||
return encode(v, { ... options, canonical: true });
|
||||
}
|
||||
}
|
||||
|
||||
export function canonicalString(v: Encodable<any>): string {
|
||||
if (!_usingCanonicalEncoder) {
|
||||
_usingCanonicalEncoder = true;
|
||||
const s = _canonicalEncoder.push(v).contentsString();
|
||||
_usingCanonicalEncoder = false;
|
||||
return s;
|
||||
} else {
|
||||
return new Encoder({ canonical: true }).push(v).contentsString();
|
||||
}
|
||||
}
|
||||
|
||||
export function encodeWithAnnotations<T>(v: Encodable<T>,
|
||||
options: EncoderEmbeddedOptions<T> = {}): Bytes {
|
||||
return encode(v, { ... options, includeAnnotations: true });
|
||||
}
|
|
@ -0,0 +1,287 @@
|
|||
// FlexMap, FlexSet: like built-in Map and Set, but with a
|
||||
// canonicalization function which gives us the possibility of a
|
||||
// coarser equivalence than the identity equivalence used in Map and
|
||||
// Set.
|
||||
|
||||
// A Canonicalizer represents the equivalence you have in mind. For
|
||||
//
|
||||
// c: Canonicalizer<V>
|
||||
// eqv: Equivalence<V>
|
||||
// v1: V
|
||||
// v2: V
|
||||
//
|
||||
// where `eqv` is the equivalence you want,
|
||||
//
|
||||
// eqv(v1, v2) ⇔ c(v1) === c(v2)
|
||||
//
|
||||
// Maps a value to the string standing for its equivalence class: two values
// are considered equal exactly when their canonical strings are identical.
export type Canonicalizer<V> = (v: V) => string;
// A binary equivalence predicate over V.
export type Equivalence<V> = (v1: V, v2: V) => boolean;

// Aliases for the built-in collections, for when plain identity equivalence
// is wanted instead of a FlexMap/FlexSet.
export type IdentityMap<K, V> = Map<K, V>;
export type IdentitySet<V> = Set<V>;
export const IdentityMap = Map;
export const IdentitySet = Set;

// Global symbols used to brand map-like and set-like objects (installed on
// the built-in prototypes below; also exposed by FlexMap).
export const IsMap = Symbol.for('IsMap');
export const IsSet = Symbol.for('IsSet');
|
||||
|
||||
// Augment the built-in Map/Set types so instances carry the IsMap/IsSet
// brands and the constructors gain isMap/isSet predicates.
declare global {
    interface Map<K, V> { [IsMap]: boolean; }
    interface MapConstructor { isMap<K, V>(x: any): x is Map<K, V>; }
    interface Set<T> { [IsSet]: boolean; }
    interface SetConstructor { isSet<T>(x: any): x is Set<T>; }
}
// Install the brands at runtime; the guards keep a second copy of this
// module (or a re-evaluation) from redefining the properties.
if (!(IsMap in Map.prototype)) {
    Object.defineProperty(Map.prototype, IsMap, { get() { return true; } });
    Map.isMap = <K,V> (x: any): x is Map<K, V> => !!x?.[IsMap];
}
if (!(IsSet in Set.prototype)) {
    Object.defineProperty(Set.prototype, IsSet, { get() { return true; } });
    Set.isSet = <T> (x: any): x is Set<T> => !!x?.[IsSet];
}
|
||||
|
||||
export function _iterMap<S,T>(i: Iterator<S>, f : (s: S) => T): IterableIterator<T> {
|
||||
const _f = (r: IteratorResult<S>): IteratorResult<T> => {
|
||||
if (r.done) {
|
||||
return { done: true, value: null };
|
||||
} else {
|
||||
return { done: false, value: f(r.value) };
|
||||
}
|
||||
};
|
||||
return {
|
||||
next: (v?: any): IteratorResult<T> => _f(i.next(v)),
|
||||
return: (v?: any): IteratorResult<T> => _f(i.return?.(v) ?? { done: true, value: null }),
|
||||
throw: (e?: any): IteratorResult<T> => _f(i.throw?.(e) ?? { done: true, value: null }),
|
||||
[Symbol.iterator]() { return this; },
|
||||
};
|
||||
}
|
||||
|
||||
export class FlexMap<K, V> implements Map<K, V> {
|
||||
readonly items: Map<string, [K, V]>;
|
||||
readonly canonicalizer: Canonicalizer<K>;
|
||||
|
||||
constructor(c: Canonicalizer<K>, items?: Iterable<readonly [K, V]>) {
|
||||
this.canonicalizer = c;
|
||||
this.items = (items === void 0)
|
||||
? new Map()
|
||||
: new Map(_iterMap(items[Symbol.iterator](), ([k, v]) => [this._key(k), [k, v]]));
|
||||
}
|
||||
|
||||
_key(k: K): string {
|
||||
return this.canonicalizer(k);
|
||||
}
|
||||
|
||||
get(k: K, defaultValue?: V): V | undefined {
|
||||
const e = this.items.get(this._key(k));
|
||||
return (e === void 0) ? defaultValue : e[1];
|
||||
}
|
||||
|
||||
getOrSet(k: K, initializer: () => V): V {
|
||||
const ks = this._key(k);
|
||||
let e = this.items.get(ks);
|
||||
if (e === void 0) {
|
||||
e = [k, initializer()];
|
||||
this.items.set(ks, e);
|
||||
}
|
||||
return e[1];
|
||||
}
|
||||
|
||||
set(k: K, v: V): this {
|
||||
this.items.set(this._key(k), [k, v]);
|
||||
return this;
|
||||
}
|
||||
|
||||
forEach(f: <T extends Map<K, V>> (v: V, k: K, map: T) => void, thisArg?: any) {
|
||||
this.items.forEach(([k, v]) => f.call(thisArg, v, k, this));
|
||||
}
|
||||
|
||||
entries(): IterableIterator<[K, V]> {
|
||||
return this.items.values();
|
||||
}
|
||||
|
||||
keys(): IterableIterator<K> {
|
||||
return _iterMap(this.items.values(), ([k, _v]) => k);
|
||||
}
|
||||
|
||||
values(): IterableIterator<V> {
|
||||
return _iterMap(this.items.values(), ([_k, v]) => v);
|
||||
}
|
||||
|
||||
delete(k: K): boolean {
|
||||
return this.items.delete(this._key(k));
|
||||
}
|
||||
|
||||
getAndDelete(k: K, defaultValue?: V): V | undefined {
|
||||
const ks = this._key(k);
|
||||
const e = this.items.get(ks);
|
||||
if (e === void 0) return defaultValue;
|
||||
this.items.delete(ks);
|
||||
return e[1];
|
||||
}
|
||||
|
||||
clear() {
|
||||
this.items.clear();
|
||||
}
|
||||
|
||||
has(k: K): boolean {
|
||||
return this.items.has(this._key(k));
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this.items.size;
|
||||
}
|
||||
|
||||
[Symbol.iterator](): IterableIterator<[K, V]> {
|
||||
return this.items.values();
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() { return 'FlexMap'; }
|
||||
|
||||
equals(other: any, eqv: Equivalence<V> = (v1, v2) => v1 === v2): boolean {
|
||||
if (!('size' in other && 'has' in other && 'get' in other)) return false;
|
||||
if (this.size !== other.size) return false;
|
||||
for (let [k, v] of this.items.values()) {
|
||||
if (!other.has(k)) return false;
|
||||
if (!eqv(v, other.get(k))) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
    /**
     * Transform the entry for `key` via `f`.
     *
     * When an entry exists, `f(oldValue)` is called; returning undefined
     * deletes the entry, otherwise the value is replaced (skipping the write
     * when `eqv` judges it unchanged). When absent, `f(defaultValue)` is
     * called; returning undefined leaves the map untouched, otherwise a new
     * entry is inserted.
     *
     * @returns the change in size: -1 (removed), 0 (replaced or no-op), or 1 (inserted).
     */
    update(key: K,
           f: (oldValue?: V) => V | undefined,
           defaultValue?: V,
           eqv: Equivalence<V> = (v1, v2) => v1 === v2): number
    {
        const ks = this._key(key);
        if (this.items.has(ks)) {
            const oldValue = this.items.get(ks)![1];
            const newValue = f(oldValue);
            if (newValue === void 0) {
                this.items.delete(ks);
                return -1;
            } else {
                // Avoid touching the map when the value is unchanged under eqv.
                if (!eqv(newValue, oldValue)) this.items.set(ks, [key, newValue]);
                return 0;
            }
        } else {
            const newValue = f(defaultValue);
            if (newValue === void 0) {
                return 0;
            } else {
                this.items.set(ks, [key, newValue]);
                return 1;
            }
        }
    }
|
||||
|
||||
    /** Iterate the canonical string forms of the keys. */
    canonicalKeys(): IterableIterator<string> {
        return this.items.keys();
    }
|
||||
|
||||
    // Brand property: marks this object as Map-like for the library's checks.
    get [IsMap](): boolean {
        return true;
    }
|
||||
}
|
||||
|
||||
export class FlexSet<V> implements Set<V> {
|
||||
readonly items: Map<string, V>;
|
||||
readonly canonicalizer: Canonicalizer<V>;
|
||||
|
||||
constructor(c: Canonicalizer<V>, items?: Iterable<V>) {
|
||||
this.canonicalizer = c;
|
||||
this.items = (items === void 0)
|
||||
? new Map()
|
||||
: new Map(_iterMap(items[Symbol.iterator](), (v) => [this._key(v), v]));
|
||||
}
|
||||
|
||||
_key(v: V): string {
|
||||
return this.canonicalizer(v);
|
||||
}
|
||||
|
||||
has(v: V): boolean {
|
||||
return this.items.has(this._key(v));
|
||||
}
|
||||
|
||||
get(v: V): {item: V} | null {
|
||||
const vs = this._key(v);
|
||||
if (this.items.has(vs)) {
|
||||
return { item: this.items.get(vs)! };
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
add(v: V): this {
|
||||
this.items.set(this._key(v), v);
|
||||
return this;
|
||||
}
|
||||
|
||||
forEach(f: <T extends Set<V>>(v: V, v2: V, set: T) => void, thisArg?: any) {
|
||||
this.items.forEach((v) => f.call(thisArg, v, v, this));
|
||||
}
|
||||
|
||||
entries(): IterableIterator<[V, V]> {
|
||||
return _iterMap(this.items.values(), (v) => [v, v]);
|
||||
}
|
||||
|
||||
keys(): IterableIterator<V> {
|
||||
return this.items.values();
|
||||
}
|
||||
|
||||
values(): IterableIterator<V> {
|
||||
return this.items.values();
|
||||
}
|
||||
|
||||
delete(v: V): boolean {
|
||||
return this.items.delete(this._key(v));
|
||||
}
|
||||
|
||||
clear() {
|
||||
this.items.clear();
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this.items.size;
|
||||
}
|
||||
|
||||
[Symbol.iterator](): IterableIterator<V> {
|
||||
return this.items.values();
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() { return 'FlexSet'; }
|
||||
|
||||
equals(other: any): boolean {
|
||||
if (!('size' in other && 'has' in other)) return false;
|
||||
if (this.size !== other.size) return false;
|
||||
for (let v of this.items.values()) {
|
||||
if (!other.has(v)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
canonicalValues(): IterableIterator<string> {
|
||||
return this.items.keys();
|
||||
}
|
||||
|
||||
union(other: Set<V>): FlexSet<V> {
|
||||
const result = new FlexSet(this.canonicalizer, this);
|
||||
for (let k of other) result.add(k);
|
||||
return result;
|
||||
}
|
||||
|
||||
intersect(other: Set<V>): FlexSet<V> {
|
||||
const result = new FlexSet(this.canonicalizer);
|
||||
for (let k of this) if (other.has(k)) result.add(k);
|
||||
return result;
|
||||
}
|
||||
|
||||
subtract(other: Set<V>): FlexSet<V> {
|
||||
const result = new FlexSet(this.canonicalizer);
|
||||
for (let k of this) if (!other.has(k)) result.add(k);
|
||||
return result;
|
||||
}
|
||||
|
||||
get [IsSet](): boolean {
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,95 @@
|
|||
import { Encoder, Preservable } from "./encoder";
|
||||
import { Tag } from "./constants";
|
||||
import { AsPreserve, PreserveOn } from "./symbols";
|
||||
import { Value } from "./values";
|
||||
import { GenericEmbedded } from "./embedded";
|
||||
|
||||
// Discriminates the two boxed float flavours.
export type FloatType = 'Single' | 'Double';
// Registered symbol under which Float instances expose their discriminator.
export const FloatType = Symbol.for('FloatType');

/**
 * Boxed IEEE float. Bare JavaScript integers are treated as Preserves
 * integers elsewhere in the library, so non-integral values are carried in
 * these explicit Single/Double wrappers.
 */
export abstract class Float {
    readonly value: number;

    constructor(value: number | Float) {
        // Accept either a raw number or another Float (copying its value).
        this.value = typeof value === 'number' ? value : value.value;
    }

    toString() {
        return this.asPreservesText();
    }

    /** Equal only to the same concrete class (Single vs Double) with the same value. */
    equals(other: any): boolean {
        return Object.is(other.constructor, this.constructor) && (other.value === this.value);
    }

    hashCode(): number {
        // Truncates to a 32-bit integer.
        return (this.value | 0); // TODO: something better?
    }

    abstract asPreservesText(): string;
    abstract get [FloatType](): FloatType;

    // Type guards keyed off the branded [FloatType] property.
    static isFloat = (x: any): x is Float => x?.[FloatType] !== void 0;
    static isSingle = (x: any): x is SingleFloat => x?.[FloatType] === 'Single';
    static isDouble = (x: any): x is DoubleFloat => x?.[FloatType] === 'Double';
}
|
||||
|
||||
export function floatValue(f: any): number {
|
||||
if (typeof f === 'number') {
|
||||
return f;
|
||||
} else if (Float.isFloat(f)) {
|
||||
return f.value;
|
||||
} else {
|
||||
return NaN;
|
||||
}
|
||||
}
|
||||
|
||||
/** 32-bit float. Serialized under Tag.Float; printed with an 'f' suffix. */
export class SingleFloat extends Float implements Preservable<never> {
    [AsPreserve]<T = GenericEmbedded>(): Value<T> {
        // Already a Preserves value; no conversion needed.
        return this;
    }

    [PreserveOn](encoder: Encoder<never>) {
        // Tag byte followed by 4 bytes of big-endian IEEE-754 single precision.
        encoder.state.emitbyte(Tag.Float);
        encoder.state.makeroom(4);
        encoder.state.view.setFloat32(encoder.state.index, this.value, false);
        encoder.state.index += 4;
    }

    get [FloatType](): 'Single' {
        return 'Single';
    }

    asPreservesText(): string {
        return '' + this.value + 'f';
    }
}
|
||||
|
||||
/** Convenience constructor for SingleFloat. */
export function Single(value: number | Float): SingleFloat {
    return new SingleFloat(value);
}
|
||||
|
||||
/** 64-bit float. Serialized under Tag.Double; printed without a suffix. */
export class DoubleFloat extends Float implements Preservable<never> {
    [AsPreserve]<T = GenericEmbedded>(): Value<T> {
        // Already a Preserves value; no conversion needed.
        return this;
    }

    [PreserveOn](encoder: Encoder<never>) {
        // Tag byte followed by 8 bytes of big-endian IEEE-754 double precision.
        encoder.state.emitbyte(Tag.Double);
        encoder.state.makeroom(8);
        encoder.state.view.setFloat64(encoder.state.index, this.value, false);
        encoder.state.index += 8;
    }

    get [FloatType](): 'Double' {
        return 'Double';
    }

    asPreservesText(): string {
        return '' + this.value;
    }
}
|
||||
|
||||
/** Convenience constructor for DoubleFloat. */
export function Double(value: number | Float): DoubleFloat {
    return new DoubleFloat(value);
}
|
|
@ -0,0 +1,140 @@
|
|||
import { Record, Tuple } from "./record";
|
||||
import { Bytes } from "./bytes";
|
||||
import { Value } from "./values";
|
||||
import { Set, Dictionary } from "./dictionary";
|
||||
import { annotate, Annotated } from "./annotated";
|
||||
import { Double, Float, Single } from "./float";
|
||||
import { Embedded } from "./embedded";
|
||||
|
||||
export type Fold<T, R = Value<T>> = (v: Value<T>) => R;
|
||||
|
||||
/**
 * Visitor interface for `fold`: one callback per Preserves value kind.
 * Compound-value callbacks receive a continuation `k` that recursively folds
 * sub-values.
 */
export interface FoldMethods<T, R> {
    // Atoms.
    boolean(b: boolean): R;
    single(f: number): R;
    double(f: number): R;
    integer(i: number): R;
    string(s: string): R;
    bytes(b: Bytes): R;
    symbol(s: symbol): R;

    // Compound values.
    record(r: Record<Value<T>, Tuple<Value<T>>, T>, k: Fold<T, R>): R;
    array(a: Array<Value<T>>, k: Fold<T, R>): R;
    set(s: Set<T>, k: Fold<T, R>): R;
    dictionary(d: Dictionary<T>, k: Fold<T, R>): R;

    annotated(a: Annotated<T>, k: Fold<T, R>): R;

    embedded(t: Embedded<T>, k: Fold<T, R>): R;
}
|
||||
|
||||
/**
 * FoldMethods implementation that structurally rebuilds the value: atoms pass
 * through, compounds are reconstructed from folded children. Subclasses only
 * decide what happens to embedded values.
 */
export abstract class ValueFold<T, R = T> implements FoldMethods<T, Value<R>> {
    boolean(b: boolean): Value<R> {
        return b;
    }
    single(f: number): Value<R> {
        // Re-box: fold hands over the raw number.
        return Single(f);
    }
    double(f: number): Value<R> {
        return Double(f);
    }
    integer(i: number): Value<R> {
        return i;
    }
    string(s: string): Value<R> {
        return s;
    }
    bytes(b: Bytes): Value<R> {
        return b;
    }
    symbol(s: symbol): Value<R> {
        return s;
    }
    record(r: Record<Value<T>, Tuple<Value<T>>, T>, k: Fold<T, Value<R>>): Value<R> {
        // Fold the label and every field.
        return Record(k(r.label), r.map(k));
    }
    array(a: Value<T>[], k: Fold<T, Value<R>>): Value<R> {
        return a.map(k);
    }
    set(s: Set<T>, k: Fold<T, Value<R>>): Value<R> {
        return s.map(k);
    }
    dictionary(d: Dictionary<T>, k: Fold<T, Value<R>>): Value<R> {
        // Both keys and values are folded.
        return d.mapEntries(([key, value]) => [k(key), k(value)]);
    }
    annotated(a: Annotated<T>, k: Fold<T, Value<R>>): Value<R> {
        // Fold the underlying item and each annotation, then re-attach.
        return annotate(k(a.item), ...a.annotations.map(k));
    }
    abstract embedded(t: Embedded<T>, k: Fold<T, Value<R>>): Value<R>;
}
|
||||
|
||||
/** Rebuilds the value unchanged, embedded values included. */
export class IdentityFold<T> extends ValueFold<T, T> {
    embedded(t: Embedded<T>, _k: Fold<T, Value<T>>): Value<T> {
        return t;
    }
}
|
||||
|
||||
export class MapFold<T, R> extends ValueFold<T, R> {
|
||||
readonly f: (t: T) => Value<R>;
|
||||
|
||||
constructor(f: (t: T) => Value<R>) {
|
||||
super();
|
||||
this.f = f;
|
||||
}
|
||||
|
||||
embedded(t: Embedded<T>, _k: Fold<T, Value<R>>): Value<R> {
|
||||
return this.f(t.embeddedValue);
|
||||
}
|
||||
}
|
||||
|
||||
// Shared identity-fold instance; stateless, so safe to reuse everywhere.
export const IDENTITY_FOLD = new IdentityFold<any>();
|
||||
|
||||
/**
 * Structurally traverse a Preserves value, dispatching each node to the
 * matching FoldMethods callback. Compound callbacks receive `walk` so they
 * control recursion into children.
 */
export function fold<T, R>(v: Value<T>, o: FoldMethods<T, R>): R {
    const walk = (v: Value<T>): R => {
        switch (typeof v) {
            case 'boolean':
                return o.boolean(v);
            case 'number':
                // Bare non-integral numbers are dispatched as doubles.
                if (!Number.isInteger(v)) {
                    // TODO: Is this convenience warranted?
                    return o.double(v);
                } else {
                    return o.integer(v);
                }
            case 'string':
                return o.string(v);
            case 'symbol':
                return o.symbol(v);
            case 'object':
                // Order matters: records are arrays, so test isRecord first;
                // annotated/boxed-float checks precede the embedded fallback.
                if (Record.isRecord<Value<T>, Tuple<Value<T>>, T>(v)) {
                    return o.record(v, walk);
                } else if (Array.isArray(v)) {
                    return o.array(v, walk);
                } else if (Set.isSet<T>(v)) {
                    return o.set(v, walk);
                } else if (Dictionary.isDictionary<T>(v)) {
                    return o.dictionary(v, walk);
                } else if (Annotated.isAnnotated<T>(v)) {
                    return o.annotated(v, walk);
                } else if (Bytes.isBytes(v)) {
                    return o.bytes(v);
                } else if (Float.isSingle(v)) {
                    return o.single(v.value);
                } else if (Float.isDouble(v)) {
                    return o.double(v.value);
                } else {
                    // Any other object is treated as an embedded value.
                    return o.embedded(v, walk);
                }
            default:
                // Exhaustiveness check: `v` should be `never` here.
                ((_v: never): never => { throw new Error("Internal error"); })(v);
        }
    };
    return walk(v);
}
|
||||
|
||||
export function mapEmbeddeds<T, R>(
|
||||
v: Value<T>,
|
||||
f: (t: T) => Value<R>,
|
||||
): Value<R>
|
||||
{
|
||||
return fold(v, new MapFold(f));
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
import { embed, GenericEmbedded } from "./embedded";
|
||||
import { Bytes } from "./bytes";
|
||||
import { Record, Tuple } from "./record";
|
||||
import { AsPreserve } from "./symbols";
|
||||
import { Value } from "./values";
|
||||
import { Dictionary, Set } from "./dictionary";
|
||||
|
||||
/**
 * Convert a plain JavaScript value to a Preserves Value<T>.
 *
 * Integers, strings, symbols and booleans pass through; arrays, Maps, Sets
 * and binary data convert recursively; objects may customize conversion via
 * an [AsPreserve] method; any other object is assumed to be an embedded T.
 * Throws TypeError for values with no Preserves representation.
 */
export function fromJS<T = GenericEmbedded>(x: any): Value<T> {
    switch (typeof x) {
        case 'number':
            if (!Number.isInteger(x)) {
                // We require that clients be explicit about integer vs. non-integer types.
                throw new TypeError("Refusing to autoconvert non-integer number to Single or Double");
            }
            // FALL THROUGH
        case 'string':
        case 'symbol':
        case 'boolean':
            return x;

        case 'undefined':
        case 'function':
        case 'bigint':
            break; // unrepresentable; fall out to the throw below

        case 'object':
            if (x === null) {
                break; // null is unrepresentable
            }
            if (typeof x[AsPreserve] === 'function') {
                // The object supplies its own conversion.
                return x[AsPreserve]();
            }
            if (Record.isRecord<Value<T>, Tuple<Value<T>>, T>(x)) {
                return x;
            }
            if (Array.isArray(x)) {
                return x.map<Value<T>>(fromJS);
            }
            if (ArrayBuffer.isView(x) || x instanceof ArrayBuffer) {
                return Bytes.from(x);
            }
            // NOTE(review): Map.isMap / Set.isSet are extensions the library
            // patches onto these constructors elsewhere — confirm the patch
            // module is loaded before this runs.
            if (Map.isMap(x)) {
                const d = new Dictionary<T>();
                x.forEach((v, k) => d.set(fromJS(k), fromJS(v)));
                return d;
            }
            if (Set.isSet(x)) {
                const s = new Set<T>();
                x.forEach(v => s.add(fromJS(v)));
                return s;
            }
            // Just... assume it's a T.
            return embed(x as T);

        default:
            break;
    }

    throw new TypeError("Cannot represent JavaScript value as Preserves: " + x);
}
|
||||
|
||||
// Module augmentation: declare the Dictionary.fromJS static attached below.
declare module "./dictionary" {
    namespace Dictionary {
        export function fromJS<T = GenericEmbedded, V = GenericEmbedded>(x: object): Dictionary<T, Value<V>>;
    }
}

// Build a Dictionary from a plain object's own enumerable entries.
// Keys stay as strings; values are converted with fromJS.
// Dictionaries are returned unchanged (not copied).
Dictionary.fromJS = function <T = GenericEmbedded, V = GenericEmbedded>(x: object): Dictionary<T, Value<V>> {
    if (Dictionary.isDictionary<T, Value<V>>(x)) return x;
    const d = new Dictionary<T, Value<V>>();
    Object.entries(x).forEach(([key, value]) => d.set(key, fromJS(value)));
    return d;
};
|
|
@ -0,0 +1,6 @@
|
|||
export * from './runtime';
export * as Constants from './constants';

// Re-export the global Array constructor under the name `Array`, as both a
// value and a type, so it is available alongside the library's exports.
const _Array = Array;
type _Array<T> = Array<T>;
export { _Array as Array };
|
|
@ -0,0 +1,29 @@
|
|||
import type { GenericEmbedded } from "./embedded";
|
||||
import type { Annotated } from "./annotated";
|
||||
|
||||
// Registered (cross-realm) symbol branding Annotated instances.
export const IsPreservesAnnotated = Symbol.for('IsPreservesAnnotated');
|
||||
|
||||
/** True when `x` carries the Annotated brand property (null-safe). */
export function isAnnotated<T = GenericEmbedded>(x: any): x is Annotated<T>
{
    return !!x?.[IsPreservesAnnotated];
}
|
||||
|
||||
/**
 * Preserves equivalence: compares after discarding top-level annotations,
 * delegating to an `equals` method when one is present, and comparing
 * records/arrays pointwise.
 */
export function is(a: any, b: any): boolean {
    if (isAnnotated(a)) a = a.item;
    if (isAnnotated(b)) b = b.item;
    if (Object.is(a, b)) return true;
    if (typeof a !== typeof b) return false;
    if (typeof a === 'object') {
        if (a === null || b === null) return false;
        // Container types define equals; pass `is` itself so nested values
        // are compared with the same equivalence.
        if ('equals' in a && typeof a.equals === 'function') return a.equals(b, is);
        if (Array.isArray(a) && Array.isArray(b)) {
            // A record is an array carrying a `label`; both sides must agree
            // on record-ness and on the label before the fields are compared.
            const isRecord = 'label' in a;
            if (isRecord !== 'label' in b) return false;
            if (isRecord && !is((a as any).label, (b as any).label)) return false;
            if (a.length !== b.length) return false;
            for (let i = 0; i < a.length; i++) if (!is(a[i], b[i])) return false;
            return true;
        }
    }
    return false;
}
|
|
@ -0,0 +1,75 @@
|
|||
import { Record, Tuple } from "./record";
|
||||
import { Bytes } from "./bytes";
|
||||
import { fold } from "./fold";
|
||||
import { is } from "./is";
|
||||
import { Value } from "./values";
|
||||
import { Set, Dictionary } from "./dictionary";
|
||||
import { Annotated } from "./annotated";
|
||||
import { unannotate } from "./strip";
|
||||
import { embed, isEmbedded, Embedded } from "./embedded";
|
||||
|
||||
export function merge<T>(
|
||||
mergeEmbeddeds: (a: T, b: T) => T | undefined,
|
||||
item0: Value<T>,
|
||||
... items: Array<Value<T>>): Value<T>
|
||||
{
|
||||
function die(): never {
|
||||
throw new Error("Cannot merge items");
|
||||
}
|
||||
|
||||
function walk(a: Value<T>, b: Value<T>): Value<T> {
|
||||
if (a === b) return a;
|
||||
return fold<T, Value<T>>(a, {
|
||||
boolean: die,
|
||||
single(_f: number) { return is(a, b) ? a : die(); },
|
||||
double(_f: number) { return is(a, b) ? a : die(); },
|
||||
integer: die,
|
||||
string: die,
|
||||
bytes(_b: Bytes) { return is(a, b) ? a : die(); },
|
||||
symbol: die,
|
||||
|
||||
record(r: Record<Value<T>, Tuple<Value<T>>, T>) {
|
||||
if (!Record.isRecord<Value<T>, Tuple<Value<T>>, T>(b)) die();
|
||||
return Record(walk(r.label, b.label), walkMany(r, b));
|
||||
},
|
||||
array(a: Array<Value<T>>) {
|
||||
if (!Array.isArray(b) || Record.isRecord(b)) die();
|
||||
return walkMany(a, b);
|
||||
},
|
||||
set(_s: Set<T>) { die(); },
|
||||
dictionary(d: Dictionary<T>) {
|
||||
if (!Dictionary.isDictionary<T>(b)) die();
|
||||
const r = new Dictionary<T>();
|
||||
d.forEach((av,ak) => {
|
||||
const bv = b.get(ak);
|
||||
r.set(ak, bv === void 0 ? av : walk(av, bv));
|
||||
});
|
||||
b.forEach((bv, bk) => {
|
||||
if (!d.has(bk)) r.set(bk, bv);
|
||||
});
|
||||
return r;
|
||||
},
|
||||
|
||||
annotated(a: Annotated<T>) {
|
||||
return walk(a, unannotate(b));
|
||||
},
|
||||
|
||||
embedded(t: Embedded<T>) {
|
||||
if (!isEmbedded<T>(b)) die();
|
||||
const r = mergeEmbeddeds(t.embeddedValue, b.embeddedValue);
|
||||
if (r === void 0) die();
|
||||
return embed(r);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function walkMany(a: Array<Value<T>>, b: Array<Value<T>>): Array<Value<T>> {
|
||||
if (a.length <= b.length) {
|
||||
return b.map((bb, i) => (i < a.length) ? walk(a[i], bb) : bb);
|
||||
} else {
|
||||
return a.map((aa, i) => (i < b.length) ? walk(aa, b[i]) : aa);
|
||||
}
|
||||
}
|
||||
|
||||
return items.reduce(walk, item0);
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
// Patching to support node.js extensions.

import { Annotated } from './annotated';
import { Bytes } from './bytes';
import { Set, Dictionary } from './dictionary';
import { Record } from './record';

import * as util from 'util';

// Give each container type a custom node `util.inspect` rendering that uses
// its Preserves text syntax instead of the default object dump.
[Bytes, Annotated, Set, Dictionary].forEach((C) => {
    (C as any).prototype[util.inspect.custom] =
        function (_depth: any, _options: any) {
            return this.asPreservesText();
        };
});

// NOTE(review): presumably used by Record stringification for content with no
// Preserves rendering of its own — confirm against record.ts.
Record.fallbackToString = util.inspect;
|
|
@ -0,0 +1,480 @@
|
|||
// Text syntax reader.
|
||||
|
||||
import type { Value } from './values';
|
||||
import { DecodeError, ShortPacket } from './codec';
|
||||
import { Dictionary, Set } from './dictionary';
|
||||
import { strip, unannotate } from './strip';
|
||||
import { Bytes, unhexDigit } from './bytes';
|
||||
import { decode, Decoder, DecoderState, neverEmbeddedTypeDecode } from './decoder';
|
||||
import { Record } from './record';
|
||||
import { Annotated, newPosition, Position, updatePosition } from './annotated';
|
||||
import { Double, DoubleFloat, Single, SingleFloat } from './float';
|
||||
import { stringify } from './text';
|
||||
import { embed, GenericEmbedded, EmbeddedTypeDecode } from './embedded';
|
||||
|
||||
/** Options controlling text-syntax reading. */
export interface ReaderStateOptions {
    includeAnnotations?: boolean;  // keep annotations/source positions on results (default false)
    name?: string | Position;      // source name, or a starting Position, for diagnostics
}
|
||||
|
||||
/** Reader options: state options plus an optional embedded-value decoder. */
export interface ReaderOptions<T> extends ReaderStateOptions {
    embeddedDecode?: EmbeddedTypeDecode<T>;
}
|
||||
|
||||
// Number-scanner helpers: digits accumulate into a string and a continuation
// decides how to finish ('int' stays a plain number, 'float' gets boxed).
type IntOrFloat = 'int' | 'float';
type Numeric = number | SingleFloat | DoubleFloat;
type IntContinuation = (kind: IntOrFloat, acc: string) => Numeric;
|
||||
|
||||
/**
 * Mutable scanning state for the Preserves text reader: the input buffer,
 * a cursor, and a Position updated per character for error reporting.
 * Additional input may be appended incrementally via write().
 */
export class ReaderState {
    buffer: string;
    pos: Position;               // position of the character at `index`
    index: number;               // offset of the next unread character
    discarded = 0;               // characters dropped from the front by write()
    options: ReaderStateOptions;

    constructor(buffer: string, options: ReaderStateOptions) {
        this.buffer = buffer;
        switch (typeof options.name) {
            case 'undefined': this.pos = newPosition(); break;
            case 'string': this.pos = newPosition(options.name); break;
            case 'object': this.pos = { ... options.name }; break; // defensive copy
        }
        this.index = 0;
        this.options = options;
    }

    /** Throw a DecodeError carrying a snapshot of `pos`. */
    error(message: string, pos: Position): never {
        throw new DecodeError(message, { ... pos });
    }

    get includeAnnotations(): boolean {
        return this.options.includeAnnotations ?? false;
    }

    copyPos(): Position {
        return { ... this.pos };
    }

    /** Append `data`, first discarding input that has already been consumed. */
    write(data: string) {
        if (this.atEnd()) {
            this.buffer = data;
        } else {
            this.buffer = this.buffer.substr(this.index) + data;
        }
        this.discarded += this.index;
        this.index = 0;
    }

    atEnd(): boolean {
        return (this.index >= this.buffer.length);
    }

    /** Current character without consuming it; throws ShortPacket at end of input. */
    peek(): string {
        if (this.atEnd()) throw new ShortPacket("Short term", this.pos);
        return this.buffer[this.index];
    }

    /** Consume one character, updating `pos`; returns the consumed offset. */
    advance(): number {
        const n = this.index++;
        updatePosition(this.pos, this.buffer[n]);
        return n;
    }

    /** Consume and return the next character; throws ShortPacket at end of input. */
    nextchar(): string {
        if (this.atEnd()) throw new ShortPacket("Short term", this.pos);
        return this.buffer[this.advance()];
    }

    /** Consume the next character, returned as a UTF-16 code unit. */
    nextcharcode(): number {
        if (this.atEnd()) throw new ShortPacket("Short term", this.pos);
        return this.buffer.charCodeAt(this.advance());
    }

    /** Skip whitespace (per isSpace, which includes commas); quiet at end of input. */
    skipws() {
        while (true) {
            if (this.atEnd()) break;
            if (!isSpace(this.peek())) break;
            this.advance();
        }
    }

    /** Read two hex digits as one byte. */
    readHex2(): number {
        const x1 = unhexDigit(this.nextcharcode());
        const x2 = unhexDigit(this.nextcharcode());
        return (x1 << 4) | x2;
    }

    /** Read four hex digits as one 16-bit value (for \u escapes). */
    readHex4(): number {
        const x1 = unhexDigit(this.nextcharcode());
        const x2 = unhexDigit(this.nextcharcode());
        const x3 = unhexDigit(this.nextcharcode());
        const x4 = unhexDigit(this.nextcharcode());
        return (x1 << 12) | (x2 << 8) | (x3 << 4) | x4;
    }

    /** Hex ByteString body: whitespace-separated hex pairs up to the closing quote. */
    readHexBinary(): Bytes {
        const acc: number[] = [];
        while (true) {
            this.skipws();
            if (this.peek() === '"') {
                this.advance();
                return Bytes.from(acc);
            }
            acc.push(this.readHex2());
        }
    }

    /** Base64 ByteString body: characters (whitespace ignored) up to ']'. */
    readBase64Binary(): Bytes {
        let acc = '';
        while (true) {
            this.skipws();
            const c = this.nextchar();
            if (c === ']') break;
            acc = acc + c;
        }
        return decodeBase64(acc);
    }

    // --- Number scanning: accumulate the literal text, then parse it. ---

    readIntpart(acc: string, ch: string): Numeric {
        // A leading '0' is not followed by further integer digits.
        if (ch === '0') return this.readFracexp('int', acc + ch);
        return this.readDigit1('int', acc, (kind, acc) => this.readFracexp(kind, acc), ch);
    }

    /** Require at least one digit, then consume the rest via readDigit0. */
    readDigit1(kind: IntOrFloat, acc: string, k: IntContinuation, ch?: string): Numeric {
        if (ch === void 0) ch = this.nextchar();
        if (ch >= '0' && ch <= '9') return this.readDigit0(kind, acc + ch, k);
        this.error('Incomplete number', this.pos);
    }

    /** Consume zero or more digits, then hand off to the continuation. */
    readDigit0(kind: IntOrFloat, acc: string, k: IntContinuation): Numeric {
        while (true) {
            const ch = this.peek();
            if (!(ch >= '0' && ch <= '9')) break;
            this.advance();
            acc = acc + ch;
        }
        return k(kind, acc);
    }

    /** Optional fractional part; its presence switches kind to 'float'. */
    readFracexp(kind: IntOrFloat, acc: string): Numeric {
        if (this.peek() === '.') {
            this.advance();
            return this.readDigit1('float', acc + '.', (kind, acc) => this.readExp(kind, acc));
        }
        return this.readExp(kind, acc);
    }

    /** Optional exponent part; its presence switches kind to 'float'. */
    readExp(kind: IntOrFloat, acc: string): Numeric {
        const ch = this.peek();
        if (ch === 'e' || ch === 'E') {
            this.advance();
            return this.readSignAndExp(acc + ch);
        }
        return this.finishNumber(kind, acc);
    }

    /** Exponent sign and digits; always a float from here on. */
    readSignAndExp(acc: string): Numeric {
        const ch = this.peek();
        if (ch === '+' || ch === '-') {
            this.advance();
            return this.readDigit1('float', acc + ch, (kind, acc) => this.finishNumber(kind, acc));
        }
        return this.readDigit1('float', acc, (kind, acc) => this.finishNumber(kind, acc));
    }

    /** Parse `acc`; floats become Single when suffixed f/F, Double otherwise. */
    finishNumber(kind: IntOrFloat, acc: string): Numeric {
        const i = parseFloat(acc);
        if (kind === 'int') return i;
        // NOTE: peek() throws ShortPacket when a float literal ends the input.
        const ch = this.peek();
        if (ch === 'f' || ch === 'F') {
            this.advance();
            return Single(i);
        } else {
            return Double(i);
        }
    }

    /** Bare symbol: read until a delimiter or whitespace; interned via Symbol.for. */
    readRawSymbol<T>(acc: string): Value<T> {
        while (true) {
            if (this.atEnd()) break;
            const ch = this.peek();
            if (('(){}[]<>";,@#:|'.indexOf(ch) !== -1) || isSpace(ch)) break;
            this.advance();
            acc = acc + ch;
        }
        return Symbol.for(acc);
    }

    /**
     * Shared scanner for quoted strings and literal binaries. `xform` maps a
     * plain character to an element, `hex` handles the `hexescape` escape
     * character ('u' or 'x'), and `finish` assembles the elements.
     */
    readStringlike<E, R>(xform: (ch: string) => E,
                         finish: (acc: E[]) => R,
                         terminator: string,
                         hexescape: string,
                         hex: () => E): R
    {
        let acc: E[] = [];
        while (true) {
            const ch = this.nextchar();
            switch (ch) {
                case terminator:
                    return finish(acc);
                case '\\': {
                    const ch = this.nextchar();
                    switch (ch) {
                        case hexescape: acc.push(hex()); break;

                        // Self-escapes: the terminator, backslash, and slash.
                        case terminator:
                        case '\\':
                        case '/':
                            acc.push(xform(ch)); break;

                        case 'b': acc.push(xform('\x08')); break;
                        case 'f': acc.push(xform('\x0c')); break;
                        case 'n': acc.push(xform('\x0a')); break;
                        case 'r': acc.push(xform('\x0d')); break;
                        case 't': acc.push(xform('\x09')); break;

                        default:
                            this.error(`Invalid escape code \\${ch}`, this.pos);
                    }
                    break;
                }
                default:
                    acc.push(xform(ch));
                    break;
            }
        }
    }

    /** Quoted string; \uXXXX escapes, including escaped surrogate pairs. */
    readString(terminator: string): string {
        return this.readStringlike(x => x, xs => xs.join(''), terminator, 'u', () => {
            const n1 = this.readHex4();
            if ((n1 >= 0xd800) && (n1 <= 0xdfff)) {
                // A high surrogate must be followed by an escaped low surrogate.
                if ((this.nextchar() === '\\') && (this.nextchar() === 'u')) {
                    const n2 = this.readHex4();
                    if ((n2 >= 0xdc00) && (n2 <= 0xdfff) && (n1 <= 0xdbff)) {
                        return String.fromCharCode(n1, n2);
                    }
                }
                this.error('Invalid surrogate pair', this.pos);
            }
            return String.fromCharCode(n1);
        });
    }

    /** Literal ByteString body: characters must be < 256; \xXX escapes. */
    readLiteralBinary(): Bytes {
        return this.readStringlike(
            x => {
                const v = x.charCodeAt(0);
                if (v >= 256) this.error(`Invalid code point ${v} in literal binary`, this.pos);
                return v;
            },
            Bytes.from,
            '"',
            'x',
            () => this.readHex2());
    }
}
|
||||
|
||||
// Embedded-value decoder that wraps the underlying Value in GenericEmbedded
// without interpreting it further.
export const genericEmbeddedTypeDecode: EmbeddedTypeDecode<GenericEmbedded> = {
    decode(s: DecoderState): GenericEmbedded {
        return new GenericEmbedded(new Decoder(s, this).next());
    },

    fromValue(v: Value<GenericEmbedded>, options: ReaderStateOptions): GenericEmbedded {
        // Strip annotations unless the caller asked to keep them.
        return new GenericEmbedded(options.includeAnnotations ? v : strip(v));
    },
};
|
||||
|
||||
/** Preserves text-syntax reader, producing Value<T> terms. */
export class Reader<T> {
    state: ReaderState;
    embeddedType: EmbeddedTypeDecode<T>;

    constructor(state: ReaderState, embeddedType: EmbeddedTypeDecode<T>);
    constructor(buffer: string, options?: ReaderOptions<T>);
    constructor(
        state_or_buffer: (ReaderState | string) = '',
        embeddedType_or_options?: (EmbeddedTypeDecode<T> | ReaderOptions<T>))
    {
        // Either adopt an existing ReaderState (sharing its cursor with the
        // caller) or build a fresh one from a buffer plus options.
        if (state_or_buffer instanceof ReaderState) {
            this.state = state_or_buffer;
            this.embeddedType = embeddedType_or_options as EmbeddedTypeDecode<T>;
        } else {
            const options = (embeddedType_or_options as ReaderOptions<T>) ?? {};
            this.state = new ReaderState(state_or_buffer, options);
            this.embeddedType = options.embeddedDecode ?? neverEmbeddedTypeDecode;
        }
    }

    /** Feed more input to the underlying state. */
    write(data: string) {
        this.state.write(data);
    }

    /** Read the rest of a ';' comment, consuming the terminating newline. */
    readCommentLine(): Value<T> {
        const startPos = this.state.copyPos();
        let acc = '';
        while (true) {
            const c = this.state.nextchar();
            if (c === '\n' || c === '\r') {
                return this.wrap(acc, startPos);
            }
            acc = acc + c;
        }
    }

    /** When annotations are kept, attach a source position to `v`. */
    wrap(v: Value<T>, pos: Position): Value<T> {
        if (this.state.includeAnnotations && !Annotated.isAnnotated(v)) {
            v = new Annotated(v, pos);
        }
        return v;
    }

    /** Read the next term and prepend annotation `v` to it ('@' and ';' forms). */
    annotateNextWith(v: Value<T>): Value<T> {
        this.state.skipws();
        if (this.state.atEnd()) {
            throw new DecodeError("Trailing annotations and comments are not permitted",
                                  this.state.pos);
        }
        const u = this.next();
        // next() already wrapped u in Annotated when annotations are kept.
        if (this.state.includeAnnotations) (u as Annotated<T>).annotations.unshift(v);
        return u;
    }

    /** Read every remaining term in the input. */
    readToEnd(): Array<Value<T>> {
        const acc = [];
        while (true) {
            this.state.skipws();
            if (this.state.atEnd()) return acc;
            acc.push(this.next());
        }
    }

    /** Read one term, dispatching on its first non-whitespace character. */
    next(): Value<T> {
        this.state.skipws();
        const startPos = this.state.copyPos();
        const unwrapped = ((): Value<T> => {
            const c = this.state.nextchar();
            switch (c) {
                case '-':
                    return this.state.readIntpart('-', this.state.nextchar());
                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                    return this.state.readIntpart('', c);
                case '"':
                    return this.state.readString('"');
                case '|':
                    // |...| is a quoted symbol.
                    return Symbol.for(this.state.readString('|'));
                case ';':
                    return this.annotateNextWith(this.readCommentLine());
                case '@':
                    return this.annotateNextWith(this.next());
                case ':':
                    this.state.error('Unexpected key/value separator between items', startPos);
                case '#': {
                    // '#'-prefixed forms: booleans, sets, ByteStrings,
                    // binary-embedded terms, and embedded values.
                    const c = this.state.nextchar();
                    switch (c) {
                        case 'f': return false;
                        case 't': return true;
                        case '{': return this.seq(new Set<T>(), (v, s) => s.add(v), '}');
                        case '"': return this.state.readLiteralBinary();
                        case 'x':
                            if (this.state.nextchar() !== '"') {
                                this.state.error('Expected open-quote at start of hex ByteString',
                                                 startPos);
                            }
                            return this.state.readHexBinary();
                        case '[': return this.state.readBase64Binary();
                        case '=': {
                            // #=<ByteString>: a binary-encoded term inline.
                            const bs = unannotate(this.next());
                            if (!Bytes.isBytes(bs)) this.state.error('ByteString must follow #=',
                                                                     startPos);
                            return decode<T>(bs, {
                                embeddedDecode: this.embeddedType,
                                includeAnnotations: this.state.options.includeAnnotations,
                            });
                        }
                        case '!': return embed(this.embeddedType.fromValue(
                            new Reader<GenericEmbedded>(this.state, genericEmbeddedTypeDecode).next(),
                            this.state.options));
                        default:
                            this.state.error(`Invalid # syntax: ${c}`, startPos);
                    }
                }
                case '<': {
                    const label = this.next();
                    const fields = this.readSequence('>');
                    return Record(label, fields);
                }
                case '[': return this.readSequence(']');
                case '{': return this.readDictionary();
                case '>': this.state.error('Unexpected >', startPos);
                case ']': this.state.error('Unexpected ]', startPos);
                case '}': this.state.error('Unexpected }', startPos);
                default:
                    return this.state.readRawSymbol(c);
            }
        })();
        return this.wrap(unwrapped, startPos);
    }

    /** Generic delimited sequence: feed terms into `acc` until `ch` is seen. */
    seq<S>(acc: S, update: (v: Value<T>, acc: S) => void, ch: string): S {
        while (true) {
            this.state.skipws();
            if (this.state.peek() === ch) {
                this.state.advance();
                return acc;
            }
            update(this.next(), acc);
        }
    }

    readSequence(ch: string): Array<Value<T>> {
        return this.seq([] as Array<Value<T>>, (v, acc) => acc.push(v), ch);
    }

    /** { key: value ... } — each key must be followed by ':'; duplicate keys error. */
    readDictionary(): Dictionary<T> {
        return this.seq(new Dictionary<T>(),
                        (k, acc) => {
                            this.state.skipws();
                            switch (this.state.peek()) {
                                case ':':
                                    if (acc.has(k)) this.state.error(
                                        `Duplicate key: ${stringify(k)}`, this.state.pos);
                                    this.state.advance();
                                    acc.set(k, this.next());
                                    break;
                                default:
                                    this.state.error('Missing key/value separator', this.state.pos);
                            }
                        },
                        '}');
    }
}
|
||||
|
||||
// Base64 digit table; accepts both the standard ('+', '/') and URL-safe
// ('-', '_') alphabets. Characters outside the table map to undefined.
const BASE64: {[key: string]: number} = {};
[... 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'].forEach(
    (c, i) => BASE64[c] = i);
BASE64['+'] = BASE64['-'] = 62;
BASE64['/'] = BASE64['_'] = 63;
|
||||
|
||||
/**
 * Decodes a base64-encoded string into Bytes.
 *
 * Accepts both the standard and URL-safe alphabets (see BASE64 above)
 * and tolerates missing padding: '=' is not in the BASE64 table, so a
 * padding character (or a truncated final quantum) makes v3/v4
 * `undefined`, which stops output after the bytes actually encoded.
 * `undefined << 18` coerces to 0 via ToInt32, so the partial quantum
 * still decodes correctly before the `break`.
 *
 * NOTE(review): input is not validated — characters outside the
 * alphabet also map to `undefined` and thus decode as zero bits (in
 * positions 1/2) or truncate the output (positions 3/4); no error is
 * raised.
 */
export function decodeBase64(s: string): Bytes {
    // Upper bound on output size; the final subarray trims any slack
    // left by padding/truncation.
    const bs = new Uint8Array(Math.floor(s.length * 3/4));
    let i = 0;
    let j = 0;
    while (i < s.length) {
        // One quantum: four 6-bit digits -> up to three bytes.
        const v1 = BASE64[s[i++]];
        const v2 = BASE64[s[i++]];
        const v3 = BASE64[s[i++]];
        const v4 = BASE64[s[i++]];
        const v = (v1 << 18) | (v2 << 12) | (v3 << 6) | v4;
        bs[j++] = (v >> 16) & 255;
        if (v3 === void 0) break;
        bs[j++] = (v >> 8) & 255;
        if (v4 === void 0) break;
        bs[j++] = v & 255;
    }
    return Bytes.from(bs.subarray(0, j));
}
|
||||
|
||||
function isSpace(s: string): boolean {
|
||||
return ' \t\n\r,'.indexOf(s) !== -1;
|
||||
}
|
|
@ -0,0 +1,101 @@
|
|||
import { GenericEmbedded } from "./embedded";
|
||||
import { is } from "./is";
|
||||
import { Value } from "./values";
|
||||
|
||||
// A Tuple is any array of T; the [T] alternative keeps tuple-literal
// inference alive at use sites.
export type Tuple<T> = Array<T> | [T];

// A Record is its fields array with a `label` property attached
// (see the Record() function below, which mutates the array in place).
export type Record<LabelType extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>
    = FieldsType & { label: LabelType };

// One accessor function per named field: given a record, yields that
// field's value.
export type RecordGetters<Fs, R> = {
    [K in string & keyof Fs]: (r: R) => Fs[K];
};

// Positional constructor-argument types: maps the i-th field name in
// Names to its declared type in Fs. Intersected with any[] so it is
// usable as a rest-parameter type.
export type CtorTypes<Fs, Names extends Tuple<keyof Fs>> =
    { [K in keyof Names]: Fs[keyof Fs & Names[K]] } & any[];

// A callable record constructor as produced by Record.makeConstructor:
// invoking it builds a record; it also carries its identity
// (constructorInfo), a type guard (isClassOf), and per-field getters (_).
export interface RecordConstructor<L extends Value<T>, Fs, Names extends Tuple<keyof Fs>, T = GenericEmbedded> {
    (...fields: CtorTypes<Fs, Names>): Record<L, CtorTypes<Fs, Names>, T>;
    constructorInfo: RecordConstructorInfo<L, T>;
    isClassOf(v: any): v is Record<L, CtorTypes<Fs, Names>, T>;
    _: RecordGetters<Fs, Record<L, CtorTypes<Fs, Names>, T>>;
};

// The (label, arity) pair identifying a record "class".
export interface RecordConstructorInfo<L extends Value<T>, T = GenericEmbedded> {
    label: L;
    arity: number;
}

// Computes the Record type produced by Record(label, fields),
// inferring the embedded type T from the label and/or fields; the
// string-literal branches surface readable type errors when inference
// is impossible or inconsistent.
export type InferredRecordType<L, FieldsType extends Tuple<any>> =
    L extends symbol ? (FieldsType extends Tuple<Value<infer T>>
        ? (Exclude<T, never> extends symbol ? Record<L, FieldsType, never> : Record<L, FieldsType, T>)
        : (FieldsType extends Tuple<Value<never>>
            ? Record<L, FieldsType, never>
            : "TYPE_ERROR_cannotInferFieldsType" & [never])) :
    L extends Value<infer T> ? (FieldsType extends Tuple<Value<T>>
        ? Record<L, FieldsType, T>
        : "TYPE_ERROR_cannotMatchFieldsTypeToLabelType" & [never]) :
    "TYPE_ERROR_cannotInferEmbeddedType" & [never];
|
||||
|
||||
export function Record<L, FieldsType extends Tuple<any>>(
|
||||
label: L,
|
||||
fields: FieldsType): InferredRecordType<L, FieldsType>
|
||||
{
|
||||
(fields as any).label = label;
|
||||
return fields as any;
|
||||
}
|
||||
|
||||
export namespace Record {
    /** Structural test: a Record is an array carrying a `label` property. */
    export function isRecord<L extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>(x: any): x is Record<L, FieldsType, T> {
        return Array.isArray(x) && 'label' in x;
    }

    /** Placeholder text used when a field value cannot be rendered. */
    export function fallbackToString (_f: Value<any>): string {
        return '<unprintable_preserves_field_value>';
    }

    /** Extracts the (label, arity) pair identifying a record's "class". */
    export function constructorInfo<L extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>(
        r: Record<L, FieldsType, T>): RecordConstructorInfo<L, T>
    {
        return { label: r.label, arity: r.length };
    }

    /**
     * True when `v` is a record whose label matches `ci.label` (via
     * is(), not object identity) and whose field count equals
     * `ci.arity`.
     */
    export function isClassOf<L extends Value<T>, FieldsType extends Tuple<Value<T>>, T = GenericEmbedded>(
        ci: RecordConstructorInfo<L, T>, v: any): v is Record<L, FieldsType, T>
    {
        return (Record.isRecord(v)) && is(ci.label, v.label) && (ci.arity === v.length);
    }

    /**
     * Curried factory for typed record constructors. The outer call
     * pins the field-name/type map Fs and embedded type T; the inner
     * call supplies the label and the ordered field names. The result
     * is a callable constructor additionally carrying:
     *  - constructorInfo: its (label, arity) identity;
     *  - isClassOf: a type guard for records it could have built;
     *  - _: one getter per field name, reading the field by position.
     */
    export function makeConstructor<Fs, T = GenericEmbedded>()
        : (<L extends Value<T>, Names extends Tuple<keyof Fs>>(label: L, fieldNames: Names) =>
            RecordConstructor<L, Fs, Names, T>)
    {
        return <L extends Value<T>, Names extends Tuple<keyof Fs>>(label: L, fieldNames: Names) => {
            const ctor: RecordConstructor<L, Fs, Names, T> =
                ((...fields: CtorTypes<Fs, Names>) =>
                    Record(label, fields)) as unknown as RecordConstructor<L, Fs, Names, T>;
            const constructorInfo = { label, arity: fieldNames.length };
            ctor.constructorInfo = constructorInfo;
            ctor.isClassOf = (v: any): v is Record<L, CtorTypes<Fs, Names>, T> => Record.isClassOf<L, CtorTypes<Fs, Names>, T>(constructorInfo, v);
            // Field getters are index-based: the i-th declared name reads r[i].
            (ctor as any)._ = {};
            fieldNames.forEach((name, i) => (ctor._ as any)[name] = (r: Record<L, CtorTypes<Fs, Names>, T>) => r[i]);
            return ctor;
        };
    }
}
|
||||
|
||||
// Render arrays — and Records, which are arrays carrying a `label`
// property — as Preserves text.
Array.prototype.asPreservesText = function (): string {
    if ('label' in (this as any)) {
        // Record syntax: <label field ...>. A field that throws while
        // printing is replaced by a placeholder rather than aborting
        // the whole rendering.
        const r = this as Record<Value, Tuple<Value>, GenericEmbedded>;
        return '<' + r.label.asPreservesText() + (r.length > 0 ? ' ': '') +
            r.map(f => {
                try {
                    return f.asPreservesText();
                } catch (e) {
                    return Record.fallbackToString(f);
                }
            }).join(' ') + '>';
    } else {
        // Plain sequence syntax: [v, v, ...].
        return '[' + this.map(i => i.asPreservesText()).join(', ') + ']';
    }
};
|
|
@ -0,0 +1,21 @@
|
|||
export * from './annotated';
|
||||
export * from './bytes';
|
||||
export * from './codec';
|
||||
export * from './compound';
|
||||
export * from './decoder';
|
||||
export * from './dictionary';
|
||||
export * from './embedded';
|
||||
export * from './embeddedTypes';
|
||||
export * from './encoder';
|
||||
export * from './flex';
|
||||
export * from './float';
|
||||
export * from './fold';
|
||||
export * from './fromjs';
|
||||
export * from './is';
|
||||
export * from './merge';
|
||||
export * from './reader';
|
||||
export * from './record';
|
||||
export * from './strip';
|
||||
export * from './symbols';
|
||||
export * from './text';
|
||||
export * from './values';
|
|
@ -0,0 +1,43 @@
|
|||
import { Value } from "./values";
|
||||
import { Annotated } from "./annotated";
|
||||
import { Record, Tuple } from "./record";
|
||||
import { Set, Dictionary } from "./dictionary";
|
||||
import type { GenericEmbedded } from "./embedded";
|
||||
|
||||
export function unannotate<T = GenericEmbedded>(v: Value<T>): Value<T> {
|
||||
return Annotated.isAnnotated<T>(v) ? v.item : v;
|
||||
}
|
||||
|
||||
/**
 * Removes exactly one layer of annotations from `v` — shorthand for
 * strip(v, 1). Returns `v` unchanged when it is not annotated.
 */
export function peel<T = GenericEmbedded>(v: Value<T>): Value<T> {
    return strip(v, 1);
}
|
||||
|
||||
/**
 * Removes annotations from `v` down to at most `depth` nested layers
 * (default: all of them), returning the underlying value.
 *
 * Each unit of depth spent unwrapping an annotation also carries the
 * walk into compound contents (record fields, array elements, set
 * members, dictionary entries) with one less unit remaining, so depth
 * bounds the annotation layers stripped along any path from the root.
 *
 * @throws Error when an annotation directly wraps another annotation
 *         (annotations are expected to attach to plain values).
 */
export function strip<T = GenericEmbedded>(
    v: Value<T>,
    depth: number = Infinity): Value<T>
{
    function step(v: Value<T>, depth: number): Value<T> {
        // Budget exhausted, or nothing to strip at this node.
        if (depth === 0) return v;
        if (!Annotated.isAnnotated<T>(v)) return v;

        const nextDepth = depth - 1;
        function walk(v: Value<T>): Value<T> { return step(v, nextDepth); }

        if (Record.isRecord<Value<T>, Tuple<Value<T>>, T>(v.item)) {
            // The label is stripped with the *current* depth, while
            // the fields get nextDepth.
            return Record(step(v.item.label, depth), v.item.map(walk));
        } else if (Annotated.isAnnotated(v.item)) {
            throw new Error("Improper annotation structure");
        } else if (nextDepth === 0) {
            // No budget left to rebuild children; return the item as-is.
            return v.item;
        } else if (Array.isArray(v.item)) {
            return (v.item as Value<T>[]).map(walk);
        } else if (Set.isSet<T>(v.item)) {
            return v.item.map(walk);
        } else if (Dictionary.isDictionary<T>(v.item)) {
            return v.item.mapEntries((e) => [walk(e[0]), walk(e[1])]);
        } else {
            // Atom or embedded value: unwrapping the annotation suffices.
            return v.item;
        }
    }
    return step(v, depth);
}
|
|
@ -0,0 +1,5 @@
|
|||
// Symbols for various Preserves protocols.
//
// Both are registered via Symbol.for, so every module sharing the
// runtime's global symbol registry resolves to the same symbol.
// NOTE(review): presumably used as method keys that objects implement
// to customize their Preserves conversion — confirm at call sites.

export const PreserveOn = Symbol.for('PreserveOn');
export const AsPreserve = Symbol.for('AsPreserve');
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
import type { Value } from './values';
|
||||
|
||||
export function stringify(x: any): string {
|
||||
if (typeof x?.asPreservesText === 'function') {
|
||||
return x.asPreservesText();
|
||||
} else {
|
||||
try {
|
||||
return JSON.stringify(x);
|
||||
} catch (_e) {
|
||||
return ('' + x).asPreservesText();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function preserves<T>(pieces: TemplateStringsArray, ...values: Value<T>[]): string {
|
||||
const result = [pieces[0]];
|
||||
values.forEach((v, i) => {
|
||||
result.push(stringify(v));
|
||||
result.push(pieces[i + 1]);
|
||||
});
|
||||
return result.join('');
|
||||
}
|
||||
|
||||
|
||||
// Teach the built-in prototypes to render themselves as Preserves text.
declare global {
    interface Object { asPreservesText(): string; }
}

// Installed non-enumerably so for..in loops and object spreads do not
// pick it up; writable so more specific implementations may override it.
Object.defineProperty(Object.prototype, 'asPreservesText', {
    enumerable: false,
    writable: true,
    value: function(): string {
        return JSON.stringify(this);
    }
});

// NOTE(review): relies on strict mode (tsconfig strict -> alwaysStrict)
// keeping `this` a primitive boolean; under sloppy mode `this` would be
// a boxed — always truthy — Boolean object.
Boolean.prototype.asPreservesText = function (): string {
    return this ? '#t' : '#f';
};

Number.prototype.asPreservesText = function (): string {
    return '' + this;
};

// Strings render as JSON string literals (double-quoted, escaped).
String.prototype.asPreservesText = function (): string {
    return JSON.stringify(this);
};

// Symbols render as their registry description; '||' stands in for a
// symbol with no description.
Symbol.prototype.asPreservesText = function (): string {
    // TODO: escaping
    return this.description ?? '||';
};
|
|
@ -0,0 +1,31 @@
|
|||
// Preserves Values.
|
||||
|
||||
import type { Bytes } from './bytes';
|
||||
import type { DoubleFloat, SingleFloat } from './float';
|
||||
import type { Annotated } from './annotated';
|
||||
import type { Set, Dictionary } from './dictionary';
|
||||
import type { Embedded, GenericEmbedded } from './embedded';
|
||||
|
||||
// A Value is the root of the Preserves data model: an atom, a
// compound container, an embedded domain-specific value, or an
// annotated wrapper around one of those. T is the embedded-value type
// and defaults to the generic placeholder.
export type Value<T = GenericEmbedded> =
    | Atom
    | Compound<T>
    | Embedded<T>
    | Annotated<T>;
// Leaf values with no substructure.
export type Atom =
    | boolean
    | SingleFloat
    | DoubleFloat
    | number
    | string
    | Bytes
    | symbol;
// Containers of other Values.
export type Compound<T = GenericEmbedded> =
    | (Array<Value<T>> | [Value<T>]) & { label: Value<T> }
    // ^ expanded from definition of Record<> in record.ts,
    // because if we use Record<Value<T>, Tuple<Value<T>>, T>,
    // TypeScript currently complains about circular use of Value<T>,
    // and if we use Record<any, any, T>, it accepts it but collapses
    // Value<T> to any.
    | Array<Value<T>>
    | Set<T>
    | Dictionary<T>;
|
|
@ -0,0 +1,118 @@
|
|||
import { Bytes, decodeBase64, fromJS } from '../src/index';
|
||||
import './test-utils';
|
||||
|
||||
describe('immutable byte arrays', () => {
|
||||
describe('Uint8Array methods', () => {
|
||||
const bs = Bytes.of(10, 20, 30, 40);
|
||||
it('should yield entries', () => {
|
||||
expect(fromJS(Array.from(bs.entries())))
|
||||
.is(fromJS([[0,10],[1,20],[2,30],[3,40]]));
|
||||
});
|
||||
it('should implement every', () => {
|
||||
expect(bs.every((b) => !(b & 1))).toBe(true);
|
||||
expect(bs.every((b) => b !== 50)).toBe(true);
|
||||
expect(!(bs.every((b) => b !== 20))).toBe(true);
|
||||
});
|
||||
it('should implement find', () => {
|
||||
expect(bs.find((b) => b > 20)).toBe(30);
|
||||
expect(bs.find((b) => b > 50)).toBe(void 0);
|
||||
});
|
||||
it('should implement findIndex', () => {
|
||||
expect(bs.findIndex((b) => b > 20)).toBe(2);
|
||||
expect(bs.findIndex((b) => b > 50)).toBe(-1);
|
||||
});
|
||||
it('should implement forEach', () => {
|
||||
const vs: number[] = [];
|
||||
bs.forEach((b) => vs.push(b));
|
||||
expect(fromJS(vs)).is(fromJS([10, 20, 30, 40]));
|
||||
});
|
||||
it('should implement includes', () => {
|
||||
expect(bs.includes(20)).toBe(true);
|
||||
expect(!bs.includes(50)).toBe(true);
|
||||
});
|
||||
it('should implement indexOf', () => {
|
||||
expect(bs.indexOf(20)).toBe(1);
|
||||
expect(bs.indexOf(50)).toBe(-1);
|
||||
});
|
||||
it('should implement join', () => {
|
||||
expect(bs.join('-')).toBe('10-20-30-40');
|
||||
});
|
||||
it('should implement keys', () => {
|
||||
expect(fromJS(Array.from(bs.keys()))).is(fromJS([0,1,2,3]));
|
||||
});
|
||||
it('should implement values', () => {
|
||||
expect(fromJS(Array.from(bs.values()))).is(fromJS([10,20,30,40]));
|
||||
});
|
||||
it('should implement filter', () => {
|
||||
expect(bs.filter((b) => b !== 30)).is(Bytes.of(10,20,40));
|
||||
});
|
||||
it('should implement slice', () => {
|
||||
const vs = bs.slice(2);
|
||||
expect(Object.is(vs._view.buffer, bs._view.buffer)).toBe(false);
|
||||
expect(vs._view.buffer.byteLength).toBe(2);
|
||||
expect(vs.get(0)).toBe(30);
|
||||
expect(vs.get(1)).toBe(40);
|
||||
expect(vs.length).toBe(2);
|
||||
});
|
||||
it('should implement subarray', () => {
|
||||
const vs = bs.subarray(2);
|
||||
expect(Object.is(vs._view.buffer, bs._view.buffer)).toBe(true);
|
||||
expect(vs._view.buffer.byteLength).toBe(4);
|
||||
expect(vs.get(0)).toBe(30);
|
||||
expect(vs.get(1)).toBe(40);
|
||||
expect(vs.length).toBe(2);
|
||||
});
|
||||
it('should implement reverse', () => {
|
||||
const vs = bs.reverse();
|
||||
expect(Object.is(vs._view.buffer, bs._view.buffer)).toBe(false);
|
||||
expect(bs.get(0)).toBe(10);
|
||||
expect(bs.get(3)).toBe(40);
|
||||
expect(vs.get(0)).toBe(40);
|
||||
expect(vs.get(3)).toBe(10);
|
||||
});
|
||||
it('should implement sort', () => {
|
||||
const vs = bs.reverse().sort();
|
||||
expect(Object.is(vs._view.buffer, bs._view.buffer)).toBe(false);
|
||||
expect(bs.get(0)).toBe(10);
|
||||
expect(bs.get(3)).toBe(40);
|
||||
expect(vs.get(0)).toBe(10);
|
||||
expect(vs.get(3)).toBe(40);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('base64 decoder', () => {
|
||||
describe('RFC4648 tests', () => {
|
||||
it('10.0', () => expect(decodeBase64("")).is(Bytes.of()));
|
||||
it('10.1', () => expect(decodeBase64("Zg==")).is(Bytes.of(102)));
|
||||
it('10.2', () => expect(decodeBase64("Zm8=")).is(Bytes.of(102, 111)));
|
||||
it('10.3', () => expect(decodeBase64("Zm9v")).is(Bytes.of(102, 111, 111)));
|
||||
it('10.4', () => expect(decodeBase64("Zm9vYg==")).is(Bytes.of(102, 111, 111, 98)));
|
||||
it('10.5', () => expect(decodeBase64("Zm9vYmE=")).is(Bytes.of(102, 111, 111, 98, 97)));
|
||||
it('10.6', () => expect(decodeBase64("Zm9vYmFy")).is(Bytes.of(102, 111, 111, 98, 97, 114)));
|
||||
|
||||
it('10.1b', () => expect(decodeBase64("Zg")).is(Bytes.of(102)));
|
||||
it('10.2b', () => expect(decodeBase64("Zm8")).is(Bytes.of(102, 111)));
|
||||
it('10.4b', () => expect(decodeBase64("Zm9vYg")).is(Bytes.of(102, 111, 111, 98)));
|
||||
it('10.5b', () => expect(decodeBase64("Zm9vYmE")).is(Bytes.of(102, 111, 111, 98, 97)));
|
||||
});
|
||||
|
||||
describe('RFC4648 examples', () => {
|
||||
it('example0', () =>
|
||||
expect(decodeBase64('FPucA9l+')).is(Bytes.of(0x14, 0xfb, 0x9c, 0x03, 0xd9, 0x7e)));
|
||||
it('example1', () =>
|
||||
expect(decodeBase64('FPucA9k=')).is(Bytes.of(0x14, 0xfb, 0x9c, 0x03, 0xd9)));
|
||||
it('example1b', () =>
|
||||
expect(decodeBase64('FPucA9k')).is(Bytes.of(0x14, 0xfb, 0x9c, 0x03, 0xd9)));
|
||||
it('example2', () =>
|
||||
expect(decodeBase64('FPucAw==')).is(Bytes.of(0x14, 0xfb, 0x9c, 0x03)));
|
||||
it('example2b', () =>
|
||||
expect(decodeBase64('FPucAw=')).is(Bytes.of(0x14, 0xfb, 0x9c, 0x03)));
|
||||
it('example2c', () =>
|
||||
expect(decodeBase64('FPucAw')).is(Bytes.of(0x14, 0xfb, 0x9c, 0x03)));
|
||||
});
|
||||
|
||||
describe('Misc test cases', () => {
|
||||
it('gQ==', () => expect(decodeBase64('gQ==')).is(Bytes.of(0x81)));
|
||||
});
|
||||
});
|
|
@ -0,0 +1,335 @@
|
|||
import {
|
||||
Value,
|
||||
Dictionary,
|
||||
decode, decodeWithAnnotations, encode, encodeWithAnnotations, canonicalEncode,
|
||||
DecodeError, ShortPacket,
|
||||
Bytes, Record,
|
||||
annotate,
|
||||
strip, peel,
|
||||
preserves,
|
||||
fromJS,
|
||||
Constants,
|
||||
Encoder,
|
||||
GenericEmbedded,
|
||||
EncoderState,
|
||||
EmbeddedType,
|
||||
DecoderState,
|
||||
Decoder,
|
||||
Embedded,
|
||||
embed,
|
||||
genericEmbeddedTypeDecode,
|
||||
genericEmbeddedTypeEncode,
|
||||
} from '../src/index';
|
||||
const { Tag } = Constants;
|
||||
import './test-utils';
|
||||
|
||||
import * as fs from 'fs';
|
||||
|
||||
const _discard = Symbol.for('discard');
|
||||
const _capture = Symbol.for('capture');
|
||||
const _observe = Symbol.for('observe');
|
||||
const Discard = Record.makeConstructor<{}, GenericEmbedded>()(_discard, []);
|
||||
const Capture = Record.makeConstructor<{pattern: Value<GenericEmbedded>}, GenericEmbedded>()(_capture, ['pattern']);
|
||||
const Observe = Record.makeConstructor<{pattern: Value<GenericEmbedded>}, GenericEmbedded>()(_observe, ['pattern']);
|
||||
|
||||
describe('record constructors', () => {
|
||||
it('should have constructorInfo', () => {
|
||||
expect(Discard.constructorInfo.label).toEqual(Symbol.for('discard'));
|
||||
expect(Capture.constructorInfo.label).toEqual(Symbol.for('capture'));
|
||||
expect(Observe.constructorInfo.label).toEqual(Symbol.for('observe'));
|
||||
expect(Discard.constructorInfo.arity).toEqual(0);
|
||||
expect(Capture.constructorInfo.arity).toEqual(1);
|
||||
expect(Observe.constructorInfo.arity).toEqual(1);
|
||||
});
|
||||
})
|
||||
|
||||
describe('RecordConstructorInfo', () => {
|
||||
const C1 = Record.makeConstructor<{x: number, y: number}>()([1], ['x', 'y']);
|
||||
const C2 = Record.makeConstructor<{z: number, w: number}>()([1], ['z', 'w']);
|
||||
it('instance comparison should ignore embedded and fieldname differences', () => {
|
||||
expect(C1(9,9)).is(C2(9,9));
|
||||
expect(C1(9,9)).not.is(C2(9,8));
|
||||
});
|
||||
it('comparison based on embedded equality should not work', () => {
|
||||
expect(C1.constructorInfo).not.toBe(C2.constructorInfo);
|
||||
});
|
||||
it('comparison based on .equals should work', () => {
|
||||
expect(C1.constructorInfo).toEqual(C2.constructorInfo);
|
||||
});
|
||||
});
|
||||
|
||||
describe('records', () => {
|
||||
it('should have correct getConstructorInfo', () => {
|
||||
expect(Record.constructorInfo(Discard())).toEqual(Discard.constructorInfo);
|
||||
expect(Record.constructorInfo(Capture(Discard()))).toEqual(Capture.constructorInfo);
|
||||
expect(Record.constructorInfo(Observe(Capture(Discard())))).toEqual(Observe.constructorInfo);
|
||||
});
|
||||
});
|
||||
|
||||
describe('parsing from subarray', () => {
|
||||
it('should maintain alignment of nextbytes', () => {
|
||||
const u = Uint8Array.of(1, 1, 1, 1, 0xb1, 0x03, 0x33, 0x33, 0x33);
|
||||
const bs = Bytes.from(u.subarray(4));
|
||||
expect(decode(bs)).is("333");
|
||||
});
|
||||
});
|
||||
|
||||
describe('reusing buffer space', () => {
|
||||
it('should be done safely, even with nested dictionaries', () => {
|
||||
expect(canonicalEncode(fromJS(['aaa', Dictionary.fromJS({a: 1}), 'zzz'])).toHex()).is(
|
||||
`b5
|
||||
b103616161
|
||||
b7
|
||||
b10161 91
|
||||
84
|
||||
b1037a7a7a
|
||||
84`.replace(/\s+/g, ''));
|
||||
});
|
||||
});
|
||||
|
||||
describe('encoding and decoding embeddeds', () => {
|
||||
class LookasideEmbeddedType implements EmbeddedType<object> {
|
||||
readonly objects: object[];
|
||||
|
||||
constructor(objects: object[]) {
|
||||
this.objects = objects;
|
||||
}
|
||||
|
||||
decode(d: DecoderState): object {
|
||||
return this.fromValue(new Decoder<GenericEmbedded>(d).next());
|
||||
}
|
||||
|
||||
encode(e: EncoderState, v: object): void {
|
||||
new Encoder(e).push(this.toValue(v));
|
||||
}
|
||||
|
||||
equals(a: object, b: object): boolean {
|
||||
return Object.is(a, b);
|
||||
}
|
||||
|
||||
fromValue(v: Value<GenericEmbedded>): object {
|
||||
if (typeof v !== 'number' || v < 0 || v >= this.objects.length) {
|
||||
throw new Error("Unknown embedded target");
|
||||
}
|
||||
return this.objects[v];
|
||||
}
|
||||
|
||||
toValue(v: object): number {
|
||||
let i = this.objects.indexOf(v);
|
||||
if (i !== -1) return i;
|
||||
this.objects.push(v);
|
||||
return this.objects.length - 1;
|
||||
}
|
||||
}
|
||||
|
||||
it('should encode using embeddedId when no function has been supplied', () => {
|
||||
const A1 = embed({a: 1});
|
||||
const A2 = embed({a: 1});
|
||||
const bs1 = canonicalEncode(A1);
|
||||
const bs2 = canonicalEncode(A2);
|
||||
const bs3 = canonicalEncode(A1);
|
||||
expect(bs1.get(0)).toBe(Tag.Embedded);
|
||||
expect(bs2.get(0)).toBe(Tag.Embedded);
|
||||
expect(bs3.get(0)).toBe(Tag.Embedded);
|
||||
// Can't really check the value assigned to the object. But we
|
||||
// can check that it's different to a similar object!
|
||||
expect(bs1).not.is(bs2);
|
||||
expect(bs1).is(bs3);
|
||||
});
|
||||
it('should refuse to decode embeddeds when no function has been supplied', () => {
|
||||
expect(() => decode(Bytes.from([Tag.Embedded, Tag.SmallInteger_lo])))
|
||||
.toThrow("Embeddeds not permitted at this point in Preserves document");
|
||||
});
|
||||
it('should encode properly', () => {
|
||||
const objects: object[] = [];
|
||||
const pt = new LookasideEmbeddedType(objects);
|
||||
const A = embed({a: 1});
|
||||
const B = embed({b: 2});
|
||||
expect(encode([A, B], { embeddedEncode: pt })).is(
|
||||
Bytes.from([Tag.Sequence,
|
||||
Tag.Embedded, Tag.SmallInteger_lo,
|
||||
Tag.Embedded, Tag.SmallInteger_lo + 1,
|
||||
Tag.End]));
|
||||
expect(objects).toEqual([A.embeddedValue, B.embeddedValue]);
|
||||
});
|
||||
it('should decode properly', () => {
|
||||
const objects: object[] = [];
|
||||
const pt = new LookasideEmbeddedType(objects);
|
||||
const X: Embedded<object> = embed({x: 123});
|
||||
const Y: Embedded<object> = embed({y: 456});
|
||||
objects.push(X.embeddedValue);
|
||||
objects.push(Y.embeddedValue);
|
||||
expect(decode(Bytes.from([
|
||||
Tag.Sequence,
|
||||
Tag.Embedded, Tag.SmallInteger_lo,
|
||||
Tag.Embedded, Tag.SmallInteger_lo + 1,
|
||||
Tag.End
|
||||
]), { embeddedDecode: pt })).is([X, Y]);
|
||||
});
|
||||
it('should store embeddeds embedded in map keys correctly', () => {
|
||||
const A1a = {a: 1};
|
||||
const A1: Embedded<object> = embed(A1a);
|
||||
const A2: Embedded<object> = embed({a: 1});
|
||||
const m = new Dictionary<object, number>();
|
||||
m.set([A1], 1);
|
||||
m.set([A2], 2);
|
||||
expect(m.get(A1)).toBeUndefined();
|
||||
expect(m.get([A1])).toBe(1);
|
||||
expect(m.get([A2])).toBe(2);
|
||||
expect(m.get([embed({a: 1})])).toBeUndefined();
|
||||
A1a.a = 3;
|
||||
expect(m.get([A1])).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('common test suite', () => {
|
||||
const samples_bin = fs.readFileSync(__dirname + '/../../../../../tests/samples.bin');
|
||||
const samples = decodeWithAnnotations(samples_bin, { embeddedDecode: genericEmbeddedTypeDecode });
|
||||
|
||||
const TestCases = Record.makeConstructor<{
|
||||
cases: Dictionary<GenericEmbedded>
|
||||
}>()(Symbol.for('TestCases'), ['cases']);
|
||||
type TestCases = ReturnType<typeof TestCases>;
|
||||
|
||||
function DS(bs: Bytes) {
|
||||
return decode(bs, { embeddedDecode: genericEmbeddedTypeDecode });
|
||||
}
|
||||
function D(bs: Bytes) {
|
||||
return decodeWithAnnotations(bs, { embeddedDecode: genericEmbeddedTypeDecode });
|
||||
}
|
||||
function E(v: Value<GenericEmbedded>) {
|
||||
return encodeWithAnnotations(v, { embeddedEncode: genericEmbeddedTypeEncode });
|
||||
}
|
||||
|
||||
interface ExpectedValues {
|
||||
[testName: string]: ({
|
||||
value: Value<GenericEmbedded>;
|
||||
} | {
|
||||
forward: Value<GenericEmbedded>;
|
||||
back: Value<GenericEmbedded>;
|
||||
});
|
||||
}
|
||||
|
||||
const expectedValues: ExpectedValues = {
|
||||
annotation1: { forward: annotate<GenericEmbedded>(9, "abc"),
|
||||
back: 9 },
|
||||
annotation2: { forward: annotate<GenericEmbedded>([[], annotate<GenericEmbedded>([], "x")],
|
||||
"abc",
|
||||
"def"),
|
||||
back: [[], []] },
|
||||
annotation3: { forward: annotate<GenericEmbedded>(5,
|
||||
annotate<GenericEmbedded>(2, 1),
|
||||
annotate<GenericEmbedded>(4, 3)),
|
||||
back: 5 },
|
||||
annotation5: {
|
||||
forward: annotate<GenericEmbedded>(
|
||||
Record<symbol, any>(Symbol.for('R'),
|
||||
[annotate<GenericEmbedded>(Symbol.for('f'),
|
||||
Symbol.for('af'))]),
|
||||
Symbol.for('ar')),
|
||||
back: Record<Value<GenericEmbedded>, any>(Symbol.for('R'), [Symbol.for('f')])
|
||||
},
|
||||
annotation6: {
|
||||
forward: Record<Value<GenericEmbedded>, any>(
|
||||
annotate<GenericEmbedded>(Symbol.for('R'),
|
||||
Symbol.for('ar')),
|
||||
[annotate<GenericEmbedded>(Symbol.for('f'),
|
||||
Symbol.for('af'))]),
|
||||
back: Record<symbol, any>(Symbol.for('R'), [Symbol.for('f')])
|
||||
},
|
||||
annotation7: {
|
||||
forward: annotate<GenericEmbedded>([], Symbol.for('a'), Symbol.for('b'), Symbol.for('c')),
|
||||
back: []
|
||||
},
|
||||
list1: {
|
||||
forward: [1, 2, 3, 4],
|
||||
back: [1, 2, 3, 4]
|
||||
},
|
||||
record2: {
|
||||
value: Observe(Record(Symbol.for("speak"), [
|
||||
Discard(),
|
||||
Capture(Discard())
|
||||
]))
|
||||
},
|
||||
};
|
||||
|
||||
type Variety = 'normal' | 'nondeterministic' | 'decode';
|
||||
|
||||
function runTestCase(variety: Variety,
|
||||
tName: string,
|
||||
binaryForm: Bytes,
|
||||
annotatedTextForm: Value<GenericEmbedded>)
|
||||
{
|
||||
describe(tName, () => {
|
||||
const textForm = strip(annotatedTextForm);
|
||||
const {forward, back} = (function () {
|
||||
const entry = expectedValues[tName] ?? {value: textForm};
|
||||
if ('value' in entry) {
|
||||
return {forward: entry.value, back: entry.value};
|
||||
} else if ('forward' in entry && 'back' in entry) {
|
||||
return entry;
|
||||
} else {
|
||||
throw new Error('Invalid expectedValues entry for ' + tName);
|
||||
}
|
||||
})();
|
||||
it('should match the expected value', () => expect(textForm).is(back));
|
||||
it('should round-trip', () => expect(DS(E(textForm))).is(back));
|
||||
it('should go forward', () => expect(DS(E(forward))).is(back));
|
||||
it('should go back', () => expect(DS(binaryForm)).is(back));
|
||||
it('should go back with annotations',
|
||||
() => expect(D(E(annotatedTextForm))).is(annotatedTextForm));
|
||||
if (variety !== 'decode' && variety !== 'nondeterministic') {
|
||||
it('should encode correctly', () => expect(E(forward)).is(binaryForm));
|
||||
it('should encode correctly with annotations',
|
||||
() => expect(E(annotatedTextForm)).is(binaryForm));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const tests = (peel(TestCases._.cases(peel(samples) as TestCases)) as
|
||||
Dictionary<GenericEmbedded>);
|
||||
tests.forEach((t0: Value<GenericEmbedded>, tName0: Value<GenericEmbedded>) => {
|
||||
const tName = Symbol.keyFor(strip(tName0) as symbol)!;
|
||||
const t = peel(t0) as Record<symbol, any, GenericEmbedded>;
|
||||
switch (t.label) {
|
||||
case Symbol.for('Test'):
|
||||
runTestCase('normal', tName, strip(t[0]) as Bytes, t[1]);
|
||||
break;
|
||||
case Symbol.for('NondeterministicTest'):
|
||||
runTestCase('nondeterministic', tName, strip(t[0]) as Bytes, t[1]);
|
||||
break;
|
||||
case Symbol.for('DecodeTest'):
|
||||
runTestCase('decode', tName, strip(t[0]) as Bytes, t[1]);
|
||||
break;
|
||||
case Symbol.for('DecodeError'):
|
||||
describe(tName, () => {
|
||||
it('should fail with DecodeError', () => {
|
||||
expect(() => D(strip(t[0]) as Bytes))
|
||||
.toThrowFilter(e =>
|
||||
DecodeError.isDecodeError(e) &&
|
||||
!ShortPacket.isShortPacket(e));
|
||||
});
|
||||
});
|
||||
break;
|
||||
case Symbol.for('DecodeEOF'): // fall through
|
||||
case Symbol.for('DecodeShort'):
|
||||
describe(tName, () => {
|
||||
it('should fail with ShortPacket', () => {
|
||||
expect(() => D(strip(t[0]) as Bytes))
|
||||
.toThrowFilter(e => ShortPacket.isShortPacket(e));
|
||||
});
|
||||
});
|
||||
break;
|
||||
case Symbol.for('ParseError'):
|
||||
case Symbol.for('ParseEOF'):
|
||||
case Symbol.for('ParseShort'):
|
||||
/* Skipped for now, until we have an implementation of text syntax */
|
||||
break;
|
||||
default:{
|
||||
const e = new Error(preserves`Unsupported test kind ${t}`);
|
||||
console.error(e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,31 @@
|
|||
import { Bytes, Decoder, genericEmbeddedType, encode, Reader } from '../src/index';
|
||||
import './test-utils';
|
||||
|
||||
import * as fs from 'fs';
|
||||
|
||||
// Exercises the text-syntax Reader against the language-independent
// sample corpus, cross-checking its output with the binary Decoder.
describe('reading common test suite', () => {
    const samples_bin = fs.readFileSync(__dirname + '/../../../../../tests/samples.bin');
    const samples_pr = fs.readFileSync(__dirname + '/../../../../../tests/samples.pr', 'utf-8');

    it('should read equal to decoded binary without annotations', () => {
        const s1 = new Reader(samples_pr, { embeddedDecode: genericEmbeddedType, includeAnnotations: false }).next();
        const s2 = new Decoder(samples_bin, { embeddedDecode: genericEmbeddedType, includeAnnotations: false }).next();
        expect(s1).is(s2);
    });

    it('should read equal to decoded binary with annotations', () => {
        const s1 = new Reader(samples_pr, { embeddedDecode: genericEmbeddedType, includeAnnotations: true }).next();
        const s2 = new Decoder(samples_bin, { embeddedDecode: genericEmbeddedType, includeAnnotations: true }).next();
        expect(s1).is(s2);
    });

    it('should read and encode back to binary with annotations', () => {
        const s = new Reader(samples_pr, { embeddedDecode: genericEmbeddedType, includeAnnotations: true }).next();
        // Canonical encoding must reproduce the corpus byte-for-byte.
        const bs = Bytes.toIO(encode(s, {
            embeddedEncode: genericEmbeddedType,
            includeAnnotations: true,
            canonical: true,
        }));
        expect(bs).toEqual(new Uint8Array(samples_bin));
    });
});
|
|
@ -0,0 +1,36 @@
|
|||
import { Value, is, preserves } from '../src/index';
|
||||
import '../src/node_support';
|
||||
|
||||
declare global {
|
||||
namespace jest {
|
||||
interface Matchers<R> {
|
||||
is<T>(expected: Value<T>): R;
|
||||
toThrowFilter(f: (e: Error) => boolean): R;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Register the custom matchers declared in the global jest namespace
// above.
expect.extend({
    // Preserves semantic equality: passes when is(actual, expected)
    // holds, as opposed to jest's structural toEqual.
    is(actual, expected) {
        return is(actual, expected)
            ? { message: () => preserves`expected ${actual} not to be Preserves.is to ${expected}`,
                pass: true }
            : { message: () => preserves`expected ${actual} to be Preserves.is to ${expected}`,
                pass: false };
    },

    // Passes when `thunk` throws an exception accepted by the filter
    // predicate `f`; fails when nothing is thrown or when the filter
    // rejects the exception.
    toThrowFilter(thunk, f) {
        try {
            thunk();
            return { message: () => preserves`expected an exception`, pass: false };
        } catch (e) {
            if (f(e)) {
                return { message: () => preserves`expected an exception not matching the filter`,
                         pass: true };
            } else {
                return { message: () => preserves`expected an exception matching the filter: ${e.constructor.name}`,
                         pass: false };
            }
        }
    }
});
|
|
@ -0,0 +1,44 @@
|
|||
import { Single, Double, fromJS, Dictionary, IDENTITY_FOLD, fold, mapEmbeddeds, Value, embed } from '../src/index';
|
||||
import './test-utils';
|
||||
|
||||
describe('Single', () => {
|
||||
it('should print reasonably', () => {
|
||||
expect(Single(123.45).toString()).toEqual("123.45f");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Double', () => {
|
||||
it('should print reasonably', () => {
|
||||
expect(Double(123.45).toString()).toEqual("123.45");
|
||||
});
|
||||
});
|
||||
|
||||
describe('fold', () => {
|
||||
function mkv<T extends object>(t: T): Value<T> {
|
||||
return fromJS<T>([
|
||||
1,
|
||||
2,
|
||||
new Dictionary([[[3, 4], fromJS([5, 6])],
|
||||
['a', 1],
|
||||
['b', true]]),
|
||||
Single(3.4),
|
||||
t,
|
||||
]);
|
||||
}
|
||||
|
||||
it('should support identity', () => {
|
||||
const w = new Date();
|
||||
const v = mkv(w);
|
||||
expect(fold(v, IDENTITY_FOLD)).is(v);
|
||||
const w1 = new Date();
|
||||
const v1 = mkv(w1);
|
||||
expect(fold(v, IDENTITY_FOLD)).not.is(v1);
|
||||
expect(mapEmbeddeds(v, _t => embed(w1))).is(v1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fromJS', () => {
|
||||
it('should map integers to themselves', () => {
|
||||
expect(fromJS(1)).toBe(1);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,16 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2017",
|
||||
"lib": ["es2019", "DOM"],
|
||||
"declaration": true,
|
||||
"baseUrl": "./src",
|
||||
"rootDir": "./src",
|
||||
"outDir": "./lib",
|
||||
"declarationDir": "./lib",
|
||||
"esModuleInterop": true,
|
||||
"moduleResolution": "node",
|
||||
"sourceMap": true,
|
||||
"strict": true
|
||||
},
|
||||
"include": ["src/**/*"]
|
||||
}
|
|
@ -0,0 +1,2 @@
|
|||
dist/
|
||||
lib/
|
|
@ -0,0 +1 @@
|
|||
version-tag-prefix javascript-@preserves/schema@
|
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env node
// CLI launcher: forwards user arguments (without node/script names) to the
// bundled compiler entry point built by rollup into dist/bin/.
require('../dist/bin/preserves-schema-ts.js').main(process.argv.slice(2));
|
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env node
// CLI launcher: forwards user arguments (without node/script names) to the
// bundled binary-schema compiler entry point built by rollup into dist/bin/.
require('../dist/bin/preserves-schemac.js').main(process.argv.slice(2));
|
|
@ -0,0 +1,4 @@
|
|||
// Jest configuration: transpile TypeScript test files via ts-jest and run
// them in a plain Node environment (no DOM).
export default {
    preset: 'ts-jest',
    testEnvironment: 'node',
};
|
|
@ -0,0 +1,39 @@
|
|||
{
|
||||
"name": "@preserves/schema",
|
||||
"version": "0.17.0",
|
||||
"description": "Schema support for Preserves data serialization format",
|
||||
"homepage": "https://gitlab.com/preserves/preserves",
|
||||
"license": "Apache-2.0",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": "gitlab:preserves/preserves",
|
||||
"main": "dist/preserves-schema.js",
|
||||
"module": "lib/index.js",
|
||||
"types": "lib/index.d.ts",
|
||||
"author": "Tony Garnock-Jones <tonyg@leastfixedpoint.com>",
|
||||
"scripts": {
|
||||
"regenerate": "rm -rf ./src/gen && ./bin/preserves-schema-ts.js --output ./src/gen ../../../../schema/schema.prs",
|
||||
"clean": "rm -rf lib dist",
|
||||
"prepare": "tsc && rollup -c",
|
||||
"rollupwatch": "rollup -c -w",
|
||||
"test": "jest",
|
||||
"testwatch": "jest --watch",
|
||||
"veryclean": "yarn run clean && rm -rf node_modules",
|
||||
"watch": "tsc -w"
|
||||
},
|
||||
"bin": {
|
||||
"preserves-schema-ts": "./bin/preserves-schema-ts.js",
|
||||
"preserves-schemac": "./bin/preserves-schemac.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@preserves/core": "^0.17.0",
|
||||
"@types/glob": "^7.1.3",
|
||||
"@types/minimatch": "^3.0.3",
|
||||
"chalk": "^4.1.0",
|
||||
"chokidar": "^3.5.1",
|
||||
"commander": "^7.2.0",
|
||||
"glob": "^7.1.6",
|
||||
"minimatch": "^3.0.4"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,58 @@
|
|||
import { terser } from 'rollup-plugin-terser';

// Path of a bundle inside dist/, with an optional infix such as '.min'.
const distfile = (insertion) => `dist/preserves-schema${insertion}.js`;

// Names that the UMD/ES bundles resolve at load time rather than inline.
const coreGlobals = { '@preserves/core': 'Preserves' };

// UMD (browser/script-tag) output description.
const umd = (insertion, extra = {}) => ({
    file: distfile(insertion),
    format: 'umd',
    name: 'PreservesSchema',
    globals: coreGlobals,
    ...extra,
});

// ES-module output description; file name carries a '.es6' infix.
const es6 = (insertion, extra = {}) => ({
    file: distfile('.es6' + insertion),
    format: 'es',
    globals: coreGlobals,
    ...extra,
});

// CommonJS bundle for one CLI tool; node builtins and runtime deps stay external.
const cli = (name) => ({
    input: `lib/bin/${name}.js`,
    output: [{ file: `dist/bin/${name}.js`, format: 'commonjs' }],
    external: [
        '@preserves/core',
        'chalk',
        'chokidar',
        'fs',
        'glob',
        'minimatch',
        'path',
        'commander',
    ],
});

export default [
    {
        input: 'lib/index.js',
        output: [
            umd(''),
            umd('.min', { plugins: [terser()] }),
            es6(''),
            es6('.min', { plugins: [terser()] }),
        ],
        external: ['@preserves/core'],
    },
    cli('preserves-schema-ts'),
    cli('preserves-schemac'),
];
|
|
@ -0,0 +1,99 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { glob } from 'glob';
|
||||
import { formatPosition, Position } from '@preserves/core';
|
||||
import { readSchema } from '../reader';
|
||||
import chalk from 'chalk';
|
||||
import * as M from '../meta';
|
||||
|
||||
// A single warning or error produced while reading/compiling schema sources.
export interface Diagnostic {
    type: 'warn' | 'error';
    // Path of the offending input file, or null for whole-run problems.
    file: string | null;
    // Either a thrown Error, or a message with an optional source position.
    detail: Error | { message: string, pos: Position | null };
};

// Result of expanding the input globs: the common base directory, every
// successfully parsed schema file, and the diagnostics accumulated on the way.
export type Expanded = {
    base: string,
    inputFiles: Array<{
        inputFilePath: string,
        // Raw file contents, kept for error reporting.
        text: string,
        // inputFilePath with the base prefix removed.
        baseRelPath: string,
        // Module path derived from baseRelPath components (extension stripped).
        modulePath: M.ModulePath,
        schema: M.Schema,
    }>,
    failures: Array<Diagnostic>,
};
|
||||
|
||||
export function computeBase(paths: string[]): string {
|
||||
if (paths.length === 0) {
|
||||
return '';
|
||||
} else if (paths.length === 1) {
|
||||
const d = path.dirname(paths[0]);
|
||||
return (d === '.') ? '' : d + '/';
|
||||
} else {
|
||||
let i = 0;
|
||||
while (true) {
|
||||
let ch: string | null = null
|
||||
for (const p of paths) {
|
||||
if (i >= p.length) return p.slice(0, i);
|
||||
if (ch === null) ch = p[i];
|
||||
if (p[i] !== ch) return p.slice(0, i);
|
||||
}
|
||||
i++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Expands the input globs, reads and parses every matched schema file, and
// returns the parsed modules plus any per-file failures. Files that fail to
// read or parse are recorded in `failures` and omitted from `inputFiles`
// rather than aborting the whole run.
export function expandInputGlob(input: string[], base0: string | undefined): Expanded {
    const matches = input.flatMap(i => glob.sync(i));
    // Use the caller-supplied base if given, otherwise the common prefix.
    const base = base0 ?? computeBase(matches);
    const failures: Array<Diagnostic> = [];

    return {
        base,
        inputFiles: matches.flatMap(inputFilePath => {
            // An explicit --base that doesn't prefix every input is a
            // configuration error; fail loudly rather than mis-deriving paths.
            if (!inputFilePath.startsWith(base)) {
                throw new Error(`Input filename ${inputFilePath} falls outside base ${base}`);
            }
            try {
                const text = fs.readFileSync(inputFilePath, 'utf-8');
                const baseRelPath = inputFilePath.slice(base.length);
                // Module path = base-relative path components with everything
                // after the first '.' dropped, interned as symbols.
                const modulePath = baseRelPath.split('/').map(p => p.split('.')[0]).map(Symbol.for);
                const schema = readSchema(text, {
                    name: inputFilePath,
                    // `include` directives are resolved relative to the
                    // including file, not the process cwd.
                    readInclude(includePath: string): string {
                        return fs.readFileSync(
                            path.resolve(path.dirname(inputFilePath), includePath),
                            'utf-8');
                    },
                });
                return [{ inputFilePath, text, baseRelPath, modulePath, schema }];
            } catch (e) {
                // Collect the failure and drop this file from the result.
                failures.push({ type: 'error', file: inputFilePath, detail: e });
                return [];
            }
        }),
        failures,
    };
}
|
||||
|
||||
export function changeExt(p: string, newext: string): string {
|
||||
return p.slice(0, -path.extname(p).length) + newext;
|
||||
}
|
||||
|
||||
// Prints each diagnostic to stderr as a colourized one-liner:
//   [ERROR|WARNING] <position-or-file>: <message> [stack trace]
// Stack traces are appended only when `traceback` is set and the detail is a
// real Error. A trailing blank line separates output when anything was printed.
export function formatFailures(failures: Array<Diagnostic>, traceback = false): void {
    for (const d of failures) {
        console.error(
            (d.type === 'error' ? chalk.redBright('[ERROR]') : chalk.yellowBright('[WARNING]'))
                + ' '
                // Prefer the detail's own position when present; a thrown
                // Error has no `pos`, so fall back to the file name.
                + chalk.blueBright(formatPosition((d.detail as any).pos ?? d.file))
                + ': '
                + d.detail.message
                + (traceback && (d.detail instanceof Error)
                    ? '\n' + d.detail.stack
                    : ''));
    }
    if (failures.length > 0) {
        console.error();
    }
}
|
|
@ -0,0 +1,178 @@
|
|||
import { compile } from '../index';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import minimatch from 'minimatch';
|
||||
import { Command } from 'commander';
|
||||
import * as M from '../meta';
|
||||
import chalk from 'chalk';
|
||||
import { Position } from '@preserves/core';
|
||||
import chokidar from 'chokidar';
|
||||
import { changeExt, Diagnostic, expandInputGlob, formatFailures } from './cli-utils';
|
||||
|
||||
// Parsed command-line options for the TypeScript schema compiler CLI.
export type CommandLineArguments = {
    // Input filenames or globs (already path-normalized).
    inputs: string[];
    // Base directory override; undefined means "compute common prefix".
    base: string | undefined;
    // Output directory override; undefined means "next to sources".
    output: string | undefined;
    // Print generated modules to stdout instead of writing files.
    stdout: boolean;
    // Import path used for @preserves/core in generated code.
    core: string;
    // Re-run compilation whenever inputs change.
    watch: boolean;
    // Include stack traces in reported compiler errors.
    traceback: boolean;
    // Extra 'Namespace=path' module mappings from repeated --module flags.
    module: string[];
};

// Outcome of one compilation pass, returned by runOnce().
export type CompilationResult = {
    options: CommandLineArguments,
    inputFiles: Array<InputFile>,
    failures: Array<Diagnostic>,
    base: string,
    output: string,
};

// One schema source file together with its derived output location.
export type InputFile = {
    inputFilePath: string,
    outputFilePath: string,
    schemaPath: M.ModulePath,
    schema: M.Schema,
};
|
||||
|
||||
function failureCount(type: 'warn' | 'error', r: CompilationResult): number {
|
||||
return r.failures.filter(f => f.type === type).length;
|
||||
}
|
||||
|
||||
// Entry point after option parsing. In one-shot mode, compiles once and exits
// nonzero on errors. In --watch mode, recompiles and re-establishes a fresh
// file watcher on every relevant change, forever.
export function run(options: CommandLineArguments): void {
    if (!options.watch) {
        if (failureCount('error', runOnce(options)) > 0) {
            process.exit(1);
        }
    } else {
        function runWatch() {
            console.clear();
            console.log(chalk.gray(new Date().toISOString()) +
                ' Compiling Schemas in watch mode...\n');
            const r = runOnce(options);
            const warningCount = failureCount('warn', r);
            const errorCount = failureCount('error', r);
            // wMsg/eMsg are `false` when the corresponding count is zero,
            // which drives the summary selection below.
            const wMsg = (warningCount > 0) && chalk.yellowBright(`${warningCount} warning(s)`);
            const eMsg = (errorCount > 0) && chalk.redBright(`${errorCount} error(s)`);
            const errorSummary =
                (wMsg && eMsg) ? `with ${eMsg} and ${wMsg}` :
                (wMsg) ? `with ${wMsg}` :
                (eMsg) ? `with ${eMsg}` :
                chalk.greenBright('successfully');
            console.log(chalk.gray(new Date().toISOString()) +
                ` Processed ${r.inputFiles.length} file(s) ${errorSummary}. Waiting for changes.`);
            // Watch the whole base directory; only events whose filename
            // matches one of the input globs trigger a rebuild. The watcher
            // is closed and recreated each pass so that a changed base or
            // file set is picked up.
            const watcher = chokidar.watch(r.base, {
                ignoreInitial: true,
            }).on('all', (_event, filename) => {
                if (options.inputs.some(i => minimatch(filename, i))) {
                    watcher.close();
                    runWatch();
                }
            });
        }
        runWatch();
    }
}
|
||||
|
||||
export function modulePathTo(file1: string, file2: string): string {
|
||||
let naive = path.relative(path.dirname(file1), file2);
|
||||
if (naive[0] !== '.' && naive[0] !== '/') naive = './' + naive;
|
||||
return changeExt(naive, '');
|
||||
}
|
||||
|
||||
// Performs one full compilation pass: expands the input globs, builds the
// cross-module environment, compiles each schema to TypeScript, and writes
// (or prints) the results. Per-file compile errors are collected rather than
// aborting the pass. Returns everything needed for summary reporting.
export function runOnce(options: CommandLineArguments): CompilationResult {
    const { base, failures, inputFiles: inputFiles0 } =
        expandInputGlob(options.inputs, options.base);
    const output = options.output ?? base;

    // Environment entries for --module Namespace=path mappings. These are
    // "artificial" modules (schema: null) resolved purely by import path.
    const extensionEnv: M.Environment = options.module.map(arg => {
        const i = arg.indexOf('=');
        if (i === -1) throw new Error(`--module argument must be Namespace=path: ${arg}`);
        const ns = arg.slice(0, i);
        // NOTE: this local `path` shadows the imported path module within
        // this arrow function only.
        const path = arg.slice(i + 1);
        return {
            schema: null,
            schemaModulePath: ns.split('.').map(Symbol.for),
            typescriptModulePath: path,
        };
    });

    // Pair each parsed schema with the output file it will be written to.
    const inputFiles: Array<InputFile> = inputFiles0.map(i => {
        const { inputFilePath, baseRelPath, modulePath, schema } = i;
        const outputFilePath = path.join(output, changeExt(baseRelPath, '.ts'));
        return { inputFilePath, outputFilePath, schemaPath: modulePath, schema };
    });

    inputFiles.forEach(c => {
        // Per-file environment: extension modules plus every compiled module
        // (including c itself), with import paths rewritten to be relative to
        // c's output file.
        const env: M.Environment = [
            ... extensionEnv.flatMap(e => {
                const p = modulePathTo(c.outputFilePath, e.typescriptModulePath);
                if (p === null) return [];
                return [{... e, typescriptModulePath: p}];
            }),
            ... inputFiles.map(cc => ({
                schema: cc.schema,
                schemaModulePath: cc.schemaPath,
                typescriptModulePath: modulePathTo(c.outputFilePath, cc.outputFilePath),
            })),
        ];
        fs.mkdirSync(path.dirname(c.outputFilePath), { recursive: true });
        let compiledModule;
        try {
            compiledModule = compile(env, c.schemaPath, c.schema, {
                preservesModule: options.core,
                // Compiler warnings are routed into the shared failures list.
                warn: (message: string, pos: Position | null) =>
                    failures.push({ type: 'warn', file: c.inputFilePath, detail: { message, pos } }),
            });
        } catch (e) {
            failures.push({ type: 'error', file: c.inputFilePath, detail: e });
        }
        // compiledModule stays undefined when compile() threw; skip output.
        if (compiledModule !== void 0) {
            if (options.stdout) {
                console.log('////------------------------------------------------------------');
                console.log('//// ' + c.outputFilePath);
                console.log();
                console.log(compiledModule);
            } else {
                fs.writeFileSync(c.outputFilePath, compiledModule, 'utf-8');
            }
        }
    });

    formatFailures(failures, options.traceback);

    return { options, inputFiles, failures, base, output };
}
|
||||
|
||||
// CLI entry point: defines the option grammar, normalizes raw commander
// output into CommandLineArguments, and hands off to run(). `argv` is the
// user-args-only vector (hence `from: 'user'`).
export function main(argv: Array<string>) {
    new Command()
        .arguments('[input...]')
        .description('Compile Preserves schema definitions to TypeScript', {
            input: 'Input filename or glob',
        })
        .option('--output <directory>', 'Output directory for modules (default: next to sources)')
        .option('--stdout', 'Prints each module to stdout one after the other instead ' +
            'of writing them to files in the `--output` directory')
        .option('--base <directory>', 'Base directory for sources (default: common prefix)')
        .option('--core <path>', 'Import path for @preserves/core', '@preserves/core')
        .option('--watch', 'Watch base directory for changes')
        .option('--traceback', 'Include stack traces in compiler errors')
        // Repeatable flag: each occurrence is accumulated into an array.
        .option('--module <namespace=path>', 'Additional Namespace=path import',
            (nsPath: string, previous: string[]): string[] => [... previous, nsPath],
            [])
        .action((inputs: string[], rawOptions) => {
            const options: CommandLineArguments = {
                inputs: inputs.map(i => path.normalize(i)),
                base: rawOptions.base,
                output: rawOptions.output,
                stdout: rawOptions.stdout,
                core: rawOptions.core,
                watch: rawOptions.watch,
                traceback: rawOptions.traceback,
                module: rawOptions.module,
            };
            // Keep full stack depth for error reporting with --traceback.
            Error.stackTraceLimit = Infinity;
            run(options);
        })
        .parse(argv, { from: 'user' });
}
|
|
@ -0,0 +1,58 @@
|
|||
import { Command } from 'commander';
|
||||
import { canonicalEncode, KeyedDictionary, underlying } from '@preserves/core';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import * as M from '../meta';
|
||||
import { expandInputGlob, formatFailures } from './cli-utils';
|
||||
|
||||
// Parsed command-line options for the binary schema compiler CLI.
export type CommandLineArguments = {
    // Input filenames or globs (already path-normalized).
    inputs: string[];
    // Base directory override; undefined means "compute common prefix".
    base: string | undefined;
    // true: emit a schema Bundle; false (--no-bundle): emit a single Schema.
    bundle: boolean;
};
|
||||
|
||||
// Reads all schema inputs and writes their canonical binary encoding to
// stdout (fd 1): either a Bundle keyed by module path, or — with --no-bundle
// and exactly one input — a bare Schema. Exits nonzero on any failure.
export function run(options: CommandLineArguments): void {
    const { failures, inputFiles } = expandInputGlob(options.inputs, options.base);

    // A bare Schema has no module-path key, so it cannot represent more (or
    // fewer) than one input file.
    if (!options.bundle && inputFiles.length !== 1) {
        failures.push({ type: 'error', file: null, detail: {
            message: 'Cannot emit non-bundle with anything other than exactly one input file',
            pos: null,
        }});
    }

    formatFailures(failures);

    if (failures.length === 0) {
        if (options.bundle) {
            fs.writeSync(1, underlying(canonicalEncode(M.fromBundle({
                modules: new KeyedDictionary<M.ModulePath, M.Schema, M.InputEmbedded>(
                    inputFiles.map(i => [i.modulePath, i.schema])),
            }))));
        } else {
            fs.writeSync(1, underlying(canonicalEncode(M.fromSchema(inputFiles[0].schema))));
        }
    } else {
        process.exit(1);
    }
}
|
||||
|
||||
// CLI entry point: defines the option grammar, normalizes raw commander
// output into CommandLineArguments, and hands off to run(). `argv` is the
// user-args-only vector (hence `from: 'user'`).
export function main(argv: Array<string>) {
    new Command()
        .arguments('[input...]')
        .description('Compile textual Preserves schema definitions to binary format', {
            input: 'Input filename or glob',
        })
        // Negatable flag: commander exposes it as `bundle`, defaulting true.
        .option('--no-bundle', 'Emit a single Schema instead of a schema Bundle')
        .option('--base <directory>', 'Base directory for sources (default: common prefix)')
        .action((inputs: string[], rawOptions) => {
            const options: CommandLineArguments = {
                inputs: inputs.map(i => path.normalize(i)),
                base: rawOptions.base,
                bundle: rawOptions.bundle,
            };
            // Keep full stack depth available for error reporting.
            Error.stackTraceLimit = Infinity;
            run(options);
        })
        .parse(argv, { from: 'user' });
}
|
|
@ -0,0 +1,130 @@
|
|||
import * as M from './meta';
|
||||
|
||||
export function checkSchema(schema: M.Schema): (
|
||||
{ ok: true, schema: M.Schema } | { ok: false, problems: Array<string> })
|
||||
{
|
||||
const checker = new Checker();
|
||||
schema.definitions.forEach(checker.checkDefinition.bind(checker));
|
||||
if (checker.problems.length > 0) {
|
||||
return { ok: false, problems: checker.problems };
|
||||
} else {
|
||||
return { ok: true, schema };
|
||||
}
|
||||
}
|
||||
|
||||
// Whether a concrete serialized value is recoverable at a given point in a
// pattern: AVAILABLE where the matcher binds the value, NOT_AVAILABLE inside
// anonymous sub-patterns whose value is discarded.
enum ValueAvailability {
    AVAILABLE,
    NOT_AVAILABLE,
};
|
||||
|
||||
// Walks a schema's definitions, recording human-readable problem strings for
// duplicate/invalid binding names, duplicate/invalid variant labels, invalid
// references, and non-literal patterns whose value cannot be recovered.
class Checker {
    problems: Array<string> = [];

    // Appends a "detail in context" problem string.
    recordProblem(context: string, detail: string): void {
        this.problems.push(`${detail} in ${context}`);
    }

    // Validates a binding name and adds it to the scope. The name is checked
    // for duplication and token validity, and is added to `scope` even when
    // invalid, so only one problem is reported per occurrence.
    checkBinding(scope: Set<string>, sym: symbol, context: string): void {
        const name = sym.description!;
        if (scope.has(name)) {
            this.recordProblem(context, `duplicate binding named ${JSON.stringify(name)}`);
        }
        if (!M.isValidToken(name)) {
            this.recordProblem(context, `invalid binding name ${JSON.stringify(name)}`);
        }
        scope.add(name);
    }

    // Dispatches on the three definition forms: 'or' (tagged variants),
    // 'and' (intersection of named patterns), or a bare Pattern.
    checkDefinition(def: M.Definition, name: symbol): void {
        switch (def._variant) {
            case 'or': {
                const labels = new Set<string>();
                // pattern0/pattern1/patternN encode "at least two" variants.
                [def.pattern0, def.pattern1, ... def.patternN].forEach(({ variantLabel, pattern }) => {
                    const context = `variant ${variantLabel} of ${name.description!}`;
                    if (labels.has(variantLabel)) {
                        this.recordProblem(context, `duplicate variant label`);
                    }
                    if (!M.isValidToken(variantLabel)) {
                        this.recordProblem(context, `invalid variant label`);
                    }
                    labels.add(variantLabel);
                    // Each variant gets a fresh binding scope.
                    this.checkPattern(new Set(), pattern, context, ValueAvailability.AVAILABLE);
                });
                break;
            }
            case 'and': {
                const ps = [def.pattern0, def.pattern1, ... def.patternN];
                // Intersected patterns share one binding scope, so duplicate
                // bindings across branches are caught.
                const scope = new Set<string>();
                ps.forEach((p) => this.checkNamedPattern(scope, p, name.description!));
                break;
            }
            case 'Pattern':
                this.checkPattern(
                    new Set(), def.value, name.description!, ValueAvailability.AVAILABLE);
                break;
        }
    }

    // A NamedPattern either binds a name ('named') or discards the value
    // ('anonymous'); availability of the serialized value differs accordingly.
    checkNamedPattern(scope: Set<string>, p: M.NamedPattern, context: string): void
    {
        switch (p._variant) {
            case 'named': {
                this.checkBinding(scope, p.value.name, context);
                this.checkPattern(scope,
                    M.Pattern.SimplePattern(p.value.pattern),
                    `${JSON.stringify(p.value.name.description!)} of ${context}`,
                    ValueAvailability.AVAILABLE);
                break;
            }
            case 'anonymous':
                // No binding: the matched value cannot be recovered later.
                this.checkPattern(scope, p.value, context, ValueAvailability.NOT_AVAILABLE);
                break;
        }
    }

    // Recursively validates a pattern. Simple patterns are checked for
    // recoverability and reference validity; compound patterns recurse into
    // their components with context strings describing the position.
    checkPattern(scope: Set<string>,
                 p: M.Pattern,
                 context: string,
                 availability: ValueAvailability): void
    {
        switch (p._variant) {
            case 'SimplePattern':
                // Only literals can be re-serialized without a bound value.
                if (p.value._variant !== 'lit' && availability === ValueAvailability.NOT_AVAILABLE) {
                    this.recordProblem(context, 'cannot recover serialization of non-literal pattern');
                }
                if (p.value._variant === 'Ref' &&
                    !(M.isValidToken(p.value.value.name.description!) &&
                        p.value.value.module.every(n => M.isValidToken(n.description!))))
                {
                    this.recordProblem(context, 'invalid reference name');
                }
                break;
            case 'CompoundPattern':
                // IIFE narrows p to M.CompoundPattern for the inner switch.
                ((p: M.CompoundPattern): void => {
                    switch (p._variant) {
                        case 'rec':
                            this.checkNamedPattern(scope, p.label, `label of ${context}`);
                            this.checkNamedPattern(scope, p.fields, `fields of ${context}`);
                            break;
                        case 'tuple':
                            p.patterns.forEach((pp, i) =>
                                this.checkNamedPattern(scope, pp, `item ${i} of ${context}`));
                            break;
                        case 'tuplePrefix':
                            p.fixed.forEach((pp, i) =>
                                this.checkNamedPattern(scope, pp, `item ${i} of ${context}`));
                            this.checkNamedPattern(
                                scope, M.promoteNamedSimplePattern(p.variable), `tail of ${context}`);
                            break;
                        case 'dict':
                            p.entries.forEach((np, key) =>
                                this.checkNamedPattern(
                                    scope,
                                    M.promoteNamedSimplePattern(np),
                                    `entry ${key.asPreservesText()} in dictionary in ${context}`));
                            break;
                    }
                })(p.value);
        }
    }
}
|
|
@ -0,0 +1,99 @@
|
|||
import { stringify } from "@preserves/core";
|
||||
import * as M from "./meta";
|
||||
import { CompilerOptions, ModuleContext } from "./compiler/context";
|
||||
import { Formatter, block, seq } from "./compiler/block";
|
||||
import { typeForDefinition } from "./compiler/gentype";
|
||||
import { converterForDefinition } from "./compiler/genconverter";
|
||||
import { renderType } from "./compiler/rendertype";
|
||||
import { genConstructor } from "./compiler/genctor";
|
||||
import { unconverterForDefinition } from "./compiler/genunconverter";
|
||||
import { sourceCodeFor } from "./compiler/value";
|
||||
|
||||
// Compiles one schema module to TypeScript source text. Emits, in order:
// imports, interned literal constants, type definitions (one per schema
// definition, plus _embedded when the schema declares an embedded type), and
// then per-definition constructors and asX/toX/fromX conversion functions.
export function compile(
    env: M.Environment,
    modulePath: M.ModulePath,
    schema: M.Schema,
    options: CompilerOptions = {},
): string {
    const mod = new ModuleContext(env, modulePath, schema, options);

    // When the schema declares an embedded type, alias it as _embedded so the
    // generated code can be generic over it.
    const embeddedName = schema.embeddedType;
    if (embeddedName._variant !== 'false') {
        mod.defineType(seq(`export type _embedded = `, mod.embeddedType, `;`));
    }

    // First pass: type aliases and value constructors.
    for (const [name, def] of schema.definitions) {
        const t = typeForDefinition(mod.resolver(), def);
        const nameStr = stringify(name);
        const resultTypeItem = nameStr + mod.genericArgsFor(t);

        mod.defineType(seq(`export type ${nameStr}`, mod.genericParametersFor(t),
            ` = `, renderType(mod, t), `;`));

        if (t.kind === 'union') {
            // Unions get a namespace holding one constructor per variant.
            mod.defineFunction(_ctx =>
                seq(`export namespace ${nameStr} `, block(
                    ... Array.from(t.variants).map(([vn, vt]) =>
                        genConstructor(mod, vn, vn, vt, t, resultTypeItem))
                )));
        } else {
            mod.defineFunction(_ctx =>
                genConstructor(mod, nameStr, void 0, t, t, resultTypeItem));
        }
    }

    // Second pass: asX (throwing), toX (undefined on failure), and fromX
    // (back to a Preserves Value) conversion functions.
    for (const [name0, def] of schema.definitions) {
        const t = typeForDefinition(mod.resolver(), def);
        const name = name0 as symbol;

        mod.defineFunction(ctx =>
            seq(`export function as${name.description!}`, mod.genericParameters(),
                `(v: _.Value<_embedded>): `, name.description!, mod.genericArgsFor(t), ` `,
                ctx.block(() => [
                    seq(`let result = to${name.description!}(v)`),
                    seq(`if (result === void 0) `,
                        `throw new TypeError(\`Invalid ${name.description!}: \${_.stringify(v)}\`)`),
                    seq(`return result`)])));

        mod.defineFunction(ctx =>
            seq(`export function to${name.description!}`, mod.genericParameters(),
                `(v: _.Value<_embedded>): undefined | `, name.description!, mod.genericArgsFor(t), ` `,
                ctx.block(() => [seq(`let result: undefined | `, name.description!, mod.genericArgsFor(t)),
                    ... converterForDefinition(ctx, def, `v`, `result`),
                    seq(`return result`)])));

        mod.defineFunction(ctx =>
            seq(`export function from${name.description!}`, mod.genericParameters(),
                `(_v: `, name.description!, mod.genericArgsFor(t), `): _.Value<_embedded> `,
                ctx.block(() => unconverterForDefinition(ctx, def, `_v`))));
    }

    // Assemble the output file.
    const f = new Formatter();
    f.write(`import * as _ from ${JSON.stringify(options.preservesModule ?? '@preserves/core')};\n`);
    mod.imports.forEach(([identifier, path]) => {
        f.write(`import * as ${identifier} from ${JSON.stringify(path)};\n`);
    });
    f.newline();

    // Literals are emitted in sorted variable-name order for stable output.
    const sortedLiterals = Array.from(mod.literals);
    sortedLiterals.sort((a, b) => a[1] < b[1] ? -1 : a[1] === b[1] ? 0 : 1);
    for (const [lit, varname] of sortedLiterals) {
        f.write(seq(`export const ${varname} = `, sourceCodeFor(lit), `;\n`));
    }
    f.newline();

    mod.typedefs.forEach(t => {
        f.write(t);
        f.newline();
        f.newline();
    });
    f.newline();

    mod.functiondefs.forEach(p => {
        f.write(p);
        f.newline();
        f.newline();
    });

    return f.toString();
}
|
|
@ -0,0 +1,177 @@
|
|||
export type Item = Emittable | string;
|
||||
|
||||
export const DEFAULT_WIDTH = 80;
|
||||
|
||||
export class Formatter {
|
||||
width = DEFAULT_WIDTH;
|
||||
indentDelta = ' ';
|
||||
currentIndent = '\n';
|
||||
buffer: Array<string> = [];
|
||||
|
||||
get indentSize(): number { return this.indentDelta.length; }
|
||||
set indentSize(n: number) { this.indentDelta = new Array(n + 1).join(' '); }
|
||||
|
||||
write(i: Item) {
|
||||
if (typeof i === 'string') {
|
||||
this.buffer.push(i);
|
||||
} else {
|
||||
i.writeOn(this);
|
||||
}
|
||||
}
|
||||
|
||||
newline() {
|
||||
this.write(this.currentIndent);
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.buffer.join('');
|
||||
}
|
||||
|
||||
withIndent(f: () => void): void {
|
||||
const oldIndent = this.currentIndent;
|
||||
try {
|
||||
this.currentIndent = this.currentIndent + this.indentDelta;
|
||||
f();
|
||||
} finally {
|
||||
this.currentIndent = oldIndent;
|
||||
}
|
||||
}
|
||||
|
||||
clone(): Formatter {
|
||||
const f = Object.assign(new Formatter(), this);
|
||||
f.buffer = [];
|
||||
return f;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatItems(i: Item[], width = DEFAULT_WIDTH): string {
|
||||
const f = new Formatter();
|
||||
f.width = width;
|
||||
i.forEach(i => f.write(i));
|
||||
return f.toString();
|
||||
}
|
||||
|
||||
export interface Emittable {
|
||||
writeOn(f: Formatter): void;
|
||||
}
|
||||
|
||||
export class Sequence implements Emittable {
|
||||
items: Array<Item>;
|
||||
|
||||
constructor(items: Array<Item>) {
|
||||
if (items.some(i => i === void 0)) throw new Error('aiee');
|
||||
this.items = items;
|
||||
}
|
||||
|
||||
get separator(): string { return ''; }
|
||||
get terminator(): string { return ''; }
|
||||
|
||||
writeOn(f: Formatter): void {
|
||||
let needSeparator = false;
|
||||
this.items.forEach(i => {
|
||||
if (needSeparator) {
|
||||
f.write(this.separator);
|
||||
} else {
|
||||
needSeparator = true;
|
||||
}
|
||||
f.write(i);
|
||||
});
|
||||
f.write(this.terminator);
|
||||
}
|
||||
}
|
||||
|
||||
export class CommaSequence extends Sequence {
|
||||
get separator(): string { return ', '; }
|
||||
}
|
||||
|
||||
export abstract class Grouping extends CommaSequence {
|
||||
abstract get open(): string;
|
||||
abstract get close(): string;
|
||||
|
||||
writeHorizontally(f: Formatter): void {
|
||||
f.write(this.open);
|
||||
super.writeOn(f);
|
||||
f.write(this.close);
|
||||
}
|
||||
|
||||
writeVertically(f: Formatter): void {
|
||||
f.write(this.open);
|
||||
if (this.items.length > 0) {
|
||||
f.withIndent(() => {
|
||||
this.items.forEach((i, index) => {
|
||||
f.newline();
|
||||
f.write(i);
|
||||
const delim = index === this.items.length - 1 ? this.terminator : this.separator;
|
||||
f.write(delim.trimRight());
|
||||
});
|
||||
});
|
||||
f.newline();
|
||||
}
|
||||
f.write(this.close);
|
||||
}
|
||||
|
||||
writeOn(f: Formatter): void {
|
||||
const g = f.clone();
|
||||
this.writeHorizontally(g);
|
||||
const s = g.toString();
|
||||
if (s.length <= f.width) {
|
||||
f.write(s);
|
||||
} else {
|
||||
this.writeVertically(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class Parens extends Grouping {
|
||||
get open(): string { return '('; }
|
||||
get close(): string { return ')'; }
|
||||
}
|
||||
|
||||
export class OperatorSequence extends Parens {
|
||||
operator: string;
|
||||
|
||||
constructor(operator: string, items: Array<Item>) {
|
||||
super(items);
|
||||
this.operator = operator;
|
||||
}
|
||||
|
||||
get separator(): string { return this.operator; }
|
||||
}
|
||||
|
||||
export class Brackets extends Grouping {
|
||||
get open(): string { return '['; }
|
||||
get close(): string { return ']'; }
|
||||
}
|
||||
|
||||
export class AngleBrackets extends Grouping {
|
||||
get open(): string { return '<'; }
|
||||
get close(): string { return '>'; }
|
||||
}
|
||||
|
||||
export class Braces extends Grouping {
|
||||
get open(): string { return '{'; }
|
||||
get close(): string { return '}'; }
|
||||
}
|
||||
|
||||
export class Block extends Braces {
|
||||
get separator(): string { return '; ' }
|
||||
get terminator(): string { return ';' }
|
||||
}
|
||||
|
||||
export const seq = (... items: Item[]) => new Sequence(items);
|
||||
export const commas = (... items: Item[]) => new CommaSequence(items);
|
||||
export const parens = (... items: Item[]) => new Parens(items);
|
||||
export const opseq = (zero: string, op: string, ... items: Item[]) =>
|
||||
(items.length === 0) ? zero : new OperatorSequence(op, items);
|
||||
export const brackets = (... items: Item[]) => new Brackets(items);
|
||||
export const anglebrackets = (... items: Item[]) => new AngleBrackets(items);
|
||||
export const braces = (... items: Item[]) => new Braces(items);
|
||||
export const block = (... items: Item[]) => {
|
||||
if (items.length === 1 && items[0] instanceof Block) {
|
||||
return items[0];
|
||||
} else {
|
||||
return new Block(items);
|
||||
}
|
||||
}
|
||||
export const fnblock = (... items: Item[]) => seq('((() => ', block(... items), ')())');
|
||||
export const keyvalue = (k: string, v: Item) => seq(JSON.stringify(k), ': ', v);
|
|
@ -0,0 +1,291 @@
|
|||
import { Dictionary, KeyedSet, FlexSet, Position, stringify, is } from "@preserves/core";
|
||||
import { refPosition } from "../reader";
|
||||
import * as M from "../meta";
|
||||
import { anglebrackets, block, braces, commas, formatItems, Item, keyvalue, seq } from "./block";
|
||||
import { ANY_TYPE, RefType, Type } from "./type";
|
||||
import { renderType, variantInitFor } from "./rendertype";
|
||||
import { typeForDefinition } from "./gentype";
|
||||
import { SchemaSyntaxError } from "../error";
|
||||
|
||||
// Options accepted by the schema-to-TypeScript compiler.
export interface CompilerOptions {
    // Import path used for @preserves/core in generated code.
    preservesModule?: string;
    // Embedded type to assume when a schema declares none.
    defaultEmbeddedType?: M.Ref;
    // Sink for non-fatal compiler warnings.
    warn?(message: string, pos: Position | null): void;
}

// A value captured during pattern matching: the field it binds and the
// source expression that produces it.
export interface Capture {
    fieldName: string;
    sourceExpr: string;
}

// Bound on definition-reference chasing, to fail cleanly on cyclic schemas.
export const RECURSION_LIMIT = 128;
|
||||
|
||||
export class ModuleContext {
|
||||
readonly env: M.Environment;
|
||||
readonly modulePath: M.ModulePath;
|
||||
readonly schema: M.Schema;
|
||||
readonly options: CompilerOptions;
|
||||
readonly embeddedType: Item;
|
||||
|
||||
readonly literals = new Dictionary<M.InputEmbedded, string>();
|
||||
readonly typedefs: Item[] = [];
|
||||
readonly functiondefs: Item[] = [];
|
||||
readonly imports = new KeyedSet<[string, string]>();
|
||||
|
||||
constructor(
|
||||
env: M.Environment,
|
||||
modulePath: M.ModulePath,
|
||||
schema: M.Schema,
|
||||
options: CompilerOptions,
|
||||
) {
|
||||
this.env = env;
|
||||
this.modulePath = modulePath;
|
||||
this.schema = schema;
|
||||
this.options = options;
|
||||
switch (schema.embeddedType._variant) {
|
||||
case 'false':
|
||||
this.embeddedType = '_.GenericEmbedded';
|
||||
break;
|
||||
case 'Ref': {
|
||||
const t = this.resolver()(schema.embeddedType.value);
|
||||
this.embeddedType = t.typeName;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
literal(v: M.Input): Item {
|
||||
let varname = this.literals.get(v);
|
||||
if (varname === void 0) {
|
||||
varname = M.jsId('$' + v.asPreservesText(), () => '__lit' + this.literals.size);
|
||||
this.literals.set(v, varname);
|
||||
}
|
||||
return varname;
|
||||
}
|
||||
|
||||
derefPattern(p: M.Definition, refCount = 0): M.Definition {
|
||||
if (refCount > RECURSION_LIMIT) {
|
||||
throw new Error('Recursion limit exceeded');
|
||||
}
|
||||
if (p._variant === 'Pattern' &&
|
||||
p.value._variant === 'SimplePattern' &&
|
||||
p.value.value._variant === 'Ref')
|
||||
{
|
||||
return this.lookup(p.value.value.value,
|
||||
(p, _t) => this.derefPattern(p, refCount + 1),
|
||||
(_modId, _modPath, pp, _tt) => this.derefPattern(pp ?? p, refCount + 1));
|
||||
} else {
|
||||
return p;
|
||||
}
|
||||
}
|
||||
|
||||
defineType(f: Item): void {
|
||||
this.typedefs.push(f);
|
||||
}
|
||||
|
||||
defineFunction(f: (ctx: FunctionContext) => Item): void {
|
||||
this.functiondefs.push(f(new FunctionContext(this)));
|
||||
}
|
||||
|
||||
resolver(modulePath?: M.ModulePath): (ref: M.Ref) => RefType {
|
||||
return (ref) => this.lookup(ref,
|
||||
(_p, _t) => Type.ref(ref.name.description!, ref),
|
||||
(modId, modPath, _p, _t) => {
|
||||
this.imports.add([modId, modPath]);
|
||||
return Type.ref(`${modId}.${ref.name.description!}`, ref);
|
||||
},
|
||||
modulePath);
|
||||
}
|
||||
|
||||
/** Resolve `name` to its Type, or null when the definition comes from
 * an artificial module that carries no schema information. */
lookupType(name: M.Ref, modulePath?: M.ModulePath): Type | null {
    const thunk = this.lookup(name, (_p, t) => t, (_modId, _modPath, _p, t) => t, modulePath);
    if (!thunk) return null;
    return thunk();
}
|
||||
|
||||
// Core name-resolution routine. Searches the environment for the module
// containing `name` and invokes `kLocal` for same-module hits or
// `kOther` for cross-module hits (with the target module's JS symbol
// and TypeScript path). Throws when the reference cannot be resolved.
lookup<R>(name: M.Ref,
          kLocal: (p: M.Definition, t: () => Type) => R,
          kOther: (modId: string, modPath: string, p: M.Definition | null, t: (() => Type) | null) => R,
          modulePath?: M.ModulePath): R
{
    // An empty `name.module` means "the current module" (or the supplied override).
    const soughtModule = name.module.length ? name.module : (modulePath ?? this.modulePath);

    for (const e of this.env) {
        if (is(e.schemaModulePath, soughtModule)) {
            if (e.schema === null) {
                // It's an artificial module, not from a schema. Assume the identifier is present.
                return kOther(M.modsymFor(e), e.typescriptModulePath, null, null);
            } else {
                const p = e.schema.definitions.get(name.name);
                if (p !== void 0) {
                    // Type computation is deferred: not every caller needs it.
                    let t = () => typeForDefinition(this.resolver(soughtModule), p);
                    if (name.module.length) {
                        // Explicitly-qualified refs are treated as cross-module
                        // even when they resolve to the current module.
                        return kOther(M.modsymFor(e), e.typescriptModulePath, p, t);
                    } else {
                        return kLocal(p, t);
                    }
                }
            }
        }
    }

    throw new SchemaSyntaxError(`Undefined reference: ${M.formatRef(name)}`, refPosition(name));
}
|
||||
|
||||
/** Render the generic-parameter list `<_embedded = ...>`, defaulting
 * the parameter to this module's configured embedded type. */
genericParameters(): Item {
    return anglebrackets(seq('_embedded = ', this.embeddedType));
}
|
||||
|
||||
/** Generic parameters for `t`, or nothing when `t` has no embedded part. */
genericParametersFor(t: Type): Item {
    if (this.hasEmbedded(t)) {
        return this.genericParameters();
    }
    return '';
}
|
||||
|
||||
/** Render the generic-argument list applied at use sites. */
genericArgs(): Item {
    return `<_embedded>`;
}
|
||||
|
||||
/** Generic arguments for `t`, or nothing when `t` has no embedded part. */
genericArgsFor(t: Type): Item {
    if (this.hasEmbedded(t)) {
        return this.genericArgs();
    }
    return '';
}
|
||||
|
||||
// True when rendering `t` would mention the `_embedded` type parameter
// anywhere, in which case generated signatures need generic
// parameters/arguments. Walks the type graph, following refs across
// modules with cycle detection via WalkState.
hasEmbedded(t: Type): boolean {
    const self = this;
    const state = new WalkState(this.modulePath);

    function walk(t: Type): boolean {
        switch (t.kind) {
            case 'union':
                for (const v of t.variants.values()) { if (walk(v)) return true; };
                return false;
            case 'unit': return false;
            case 'array': return walk(t.type);
            case 'set': return true; // because ref to _embedded in renderType()
            case 'dictionary': return true; // because ref to _embedded in renderType()
            case 'ref': {
                if (t.ref === null) {
                    // Base (non-schema) types: only these two mention _embedded.
                    switch (t.typeName) {
                        case '_embedded': return true;
                        case '_.Value': return true;
                        default: return false;
                    }
                } else {
                    // Schema reference: recurse into the referenced type.
                    // A cycle (kf) or unresolvable type counts as "no embedded".
                    return state.cycleCheck(
                        t.ref,
                        ref => self.lookupType(ref, state.modulePath),
                        t => t ? walk(t) : false,
                        () => false);
                }
            }
            case 'record':
                for (const v of t.fields.values()) { if (walk(v)) return true; };
                return false;
        }
    }

    return walk(t);
}
|
||||
}
|
||||
|
||||
export class FunctionContext {
|
||||
readonly mod: ModuleContext;
|
||||
|
||||
tempCounter = 0;
|
||||
temps: Map<string, { type: Item, names: string[] }> = new Map();
|
||||
|
||||
captures: Capture[] = [];
|
||||
variantName: string | undefined = void 0;
|
||||
|
||||
constructor(mod: ModuleContext) {
|
||||
this.mod = mod;
|
||||
}
|
||||
|
||||
gentempname(): string {
|
||||
return '_tmp' + this.tempCounter++;
|
||||
}
|
||||
|
||||
gentemp(vartype: Type = ANY_TYPE): string {
|
||||
const typeitem = renderType(this.mod, vartype);
|
||||
const typestr = formatItems([typeitem], Infinity);
|
||||
const varname = this.gentempname();
|
||||
let e = this.temps.get(typestr);
|
||||
if (e === void 0) {
|
||||
e = { type: typeitem, names: [] };
|
||||
this.temps.set(typestr, e);
|
||||
}
|
||||
e.names.push(varname);
|
||||
return varname;
|
||||
}
|
||||
|
||||
block(f: () => Item[]): Item {
|
||||
const oldTemps = this.temps;
|
||||
this.temps = new Map();
|
||||
const items = f();
|
||||
const ts = this.temps;
|
||||
this.temps = oldTemps;
|
||||
return block(
|
||||
... Array.from(ts).map(([_typestr, { type, names }]) =>
|
||||
seq(`let `, commas(... names), `: (`, type, `) | undefined`)),
|
||||
... items);
|
||||
}
|
||||
|
||||
withCapture<R>(
|
||||
fieldName: string | undefined, sourceExpr: string, ks: (sourceExpr: string) => R): R
|
||||
{
|
||||
if (fieldName !== void 0) this.captures.push({ fieldName, sourceExpr });
|
||||
const result = ks(sourceExpr);
|
||||
if (fieldName !== void 0) this.captures.pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
convertCapture(
|
||||
fieldName: string | undefined, sourceExpr: string, ks: () => Item[]): Item
|
||||
{
|
||||
return this.withCapture(fieldName, sourceExpr, sourceExpr =>
|
||||
seq(`if (${sourceExpr} !== void 0) `, this.block(() => ks())));
|
||||
}
|
||||
|
||||
buildCapturedCompound(dest: string): Item {
|
||||
const fields = [
|
||||
... variantInitFor(this.variantName),
|
||||
... this.captures.map(({ fieldName, sourceExpr }) =>
|
||||
keyvalue(fieldName, sourceExpr))
|
||||
];
|
||||
return seq(`${dest} = `, fields.length === 0 ? `null` : braces(... fields));
|
||||
}
|
||||
}
|
||||
|
||||
export class WalkState {
|
||||
modulePath: M.ModulePath;
|
||||
readonly seen: FlexSet<M.Ref>;
|
||||
|
||||
constructor(modulePath: M.ModulePath) {
|
||||
this.modulePath = modulePath;
|
||||
this.seen = new FlexSet(refCanonicalizer);
|
||||
}
|
||||
|
||||
cycleCheck<E, R>(
|
||||
r0: M.Ref,
|
||||
step: (ref: M.Ref) => E,
|
||||
ks: (e: E) => R,
|
||||
kf: () => R,
|
||||
): R {
|
||||
const r = M.Ref({
|
||||
module: r0.module.length ? r0.module : this.modulePath,
|
||||
name: r0.name
|
||||
});
|
||||
if (this.seen.has(r)) {
|
||||
return kf();
|
||||
} else {
|
||||
this.seen.add(r);
|
||||
const maybe_e = step(r);
|
||||
const saved = this.modulePath;
|
||||
this.modulePath = r.module;
|
||||
const result = ks(maybe_e);
|
||||
this.modulePath = saved;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Canonical string key for a fully-qualified schema reference, used
 * by FlexSet to compare refs structurally. */
function refCanonicalizer(r: M.Ref): string {
    return stringify([... r.module, r.name]);
}
|
|
@ -0,0 +1,237 @@
|
|||
import { FunctionContext } from "./context";
|
||||
import * as M from '../meta';
|
||||
import { Item, seq } from "./block";
|
||||
import { simpleType, typeFor } from "./gentype";
|
||||
import { ANY_TYPE, Type } from "./type";
|
||||
|
||||
// Emit statements converting the Preserves value in `src` into `dest`
// according to definition `p`; `dest` is left `undefined` on mismatch.
export function converterForDefinition(
    ctx: FunctionContext,
    p: M.Definition,
    src: string,
    dest: string): Item[]
{
    switch (p._variant) {
        case 'or': {
            const alts = [p.pattern0, p.pattern1, ... p.patternN];
            // Try each alternative in order, falling through to the next
            // only when the previous one left `dest` undefined.
            function loop(i: number): Item[] {
                ctx.variantName = alts[i].variantLabel;
                return [... converterForPattern(ctx, alts[i].pattern, src, dest),
                        ... ((i < alts.length - 1)
                            ? [seq(`if (${dest} === void 0) `, ctx.block(() => loop(i + 1)))]
                            : [])];
            }
            return loop(0);
        }
        case 'and': {
            const pcs = [p.pattern0, p.pattern1, ... p.patternN];
            // Every conjunct must match; captures accumulate across them
            // and are assembled once the last conjunct succeeds.
            function loop(i: number): Item[] {
                return (i < pcs.length)
                    ? converterFor(ctx, pcs[i], src, () => loop(i + 1))
                    : [ctx.buildCapturedCompound(dest)];
            }
            return loop(0);
        }
        case 'Pattern':
            ctx.variantName = void 0;
            return converterForPattern(ctx, p.value, src, dest);
    }
}
|
||||
|
||||
// Convert `src` into `dest` according to a bare (unnamed) pattern. The
// continuation decides how the converted simple value (if any) is
// stored, honouring the current variant label.
function converterForPattern(
    ctx: FunctionContext,
    p: M.Pattern,
    src: string,
    dest: string): Item[]
{
    return converterFor(ctx, M.NamedPattern.anonymous(p), src, simpleValue => {
        if (simpleValue === void 0) {
            // Compound pattern: assemble the captured fields.
            return [ctx.buildCapturedCompound(dest)];
        } else if (ctx.variantName !== void 0) {
            if (typeFor(ctx.mod.resolver(), p).kind === 'unit') {
                // Unit-typed variants carry no payload field.
                return [ctx.buildCapturedCompound(dest)];
            } else {
                // Non-unit simple value: store it under a `value` field.
                return [ctx.withCapture('value',
                    simpleValue,
                    () => ctx.buildCapturedCompound(dest))];
            }
        } else {
            return [`${dest} = ${simpleValue}`];
        }
    });
}
|
||||
|
||||
// Convert the fixed elements `ps` of an array at `src`, optionally
// followed by a variable-length tail pattern; `k` continues after all
// elements matched. Emits an array/length guard unless `knownArray`
// says the caller already established `src` is an array.
function converterForTuple(ctx: FunctionContext,
                           ps: M.NamedPattern[],
                           src: string,
                           knownArray: boolean,
                           variablePattern: M.NamedSimplePattern | undefined,
                           k: () => Item[]): Item[]
{
    function loop(i: number): Item[] {
        if (i < ps.length) {
            return converterFor(ctx, ps[i], `${src}[${i}]`, () => loop(i + 1));
        } else {
            if (variablePattern === void 0) {
                return k();
            } else {
                // Bind the remaining elements (the tail) to a fresh temp
                // and convert it with knownArray = true.
                const vN = ctx.gentemp(Type.array(ANY_TYPE));
                return [ps.length > 0 ? `${vN} = ${src}.slice(${ps.length})` : `${vN} = ${src}`,
                        ... converterFor(ctx, M.promoteNamedSimplePattern(variablePattern), vN, k, true)];
            }
        }
    }

    // Exact length check without a tail pattern; at-least check with one
    // (and no check at all for a pure tail).
    const lengthCheck = variablePattern === void 0
        ? seq(` && ${src}.length === ${ps.length}`)
        : ((ps.length === 0) ? '' : seq(` && ${src}.length >= ${ps.length}`));

    return knownArray
        ? loop(0)
        : [seq(`if (_.Array.isArray(${src})`, lengthCheck, `) `, ctx.block(() => loop(0)))];
}
|
||||
|
||||
function converterFor(
|
||||
ctx: FunctionContext,
|
||||
np: M.NamedPattern,
|
||||
src: string,
|
||||
ks: (dest: string | undefined) => Item[],
|
||||
knownArray = false): Item[]
|
||||
{
|
||||
let p = M.unnamePattern(np);
|
||||
let maybeName = M.nameFor(np);
|
||||
|
||||
if (p._variant === 'SimplePattern') {
|
||||
const dest = ctx.gentemp(simpleType(ctx.mod.resolver(), p.value));
|
||||
return [... converterForSimple(ctx, p.value, src, dest, knownArray),
|
||||
ctx.convertCapture(maybeName, dest, () => ks(dest))];
|
||||
} else {
|
||||
return converterForCompound(ctx, p.value, src, knownArray, () => ks(void 0));
|
||||
}
|
||||
}
|
||||
|
||||
// Emit statements converting `src` into `dest` according to a simple
// pattern; `dest` becomes `undefined` when `src` does not match.
export function converterForSimple(
    ctx: FunctionContext,
    p: M.SimplePattern,
    src: string,
    dest: string,
    knownArray: boolean): Item[]
{
    switch (p._variant) {
        case 'any':
            return [`${dest} = ${src}`];
        case 'atom': {
            // One runtime test per atom kind; floats unwrap to `.value`.
            let test: Item;
            let valexp: Item = `${src}`;
            switch (p.atomKind._variant) {
                case 'Boolean': test = `typeof ${src} === 'boolean'`; break;
                case 'Float': test = `_.Float.isSingle(${src})`; valexp = `${src}.value`; break;
                case 'Double': test = `_.Float.isDouble(${src})`; valexp = `${src}.value`; break;
                case 'SignedInteger': test = `typeof ${src} === 'number'`; break;
                case 'String': test = `typeof ${src} === 'string'`; break;
                case 'ByteString': test = `_.Bytes.isBytes(${src})`; break;
                case 'Symbol': test = `typeof ${src} === 'symbol'`; break;
            }
            return [seq(`${dest} = `, test, ` ? `, valexp, ` : void 0`)];
        }
        case 'embedded':
            return [`${dest} = _.isEmbedded<_embedded>(${src}) ? ${src}.embeddedValue : void 0`];
        case 'lit':
            // A matched literal carries no information: yield `null`.
            return [`${dest} = _.is(${src}, ${ctx.mod.literal(p.value)}) ? null : void 0`];

        case 'seqof': {
            const kKnownArray = () => {
                const v = ctx.gentempname();
                // Any element that fails to convert aborts the whole
                // sequence (dest reset to undefined, loop broken).
                return [
                    seq(`${dest} = []`),
                    seq(`for (const ${v} of ${src}) `, ctx.block(() => [
                        ... converterFor(ctx, M.anonymousSimplePattern(p.pattern), v, vv =>
                            [`${dest}.push(${vv})`, `continue`]),
                        seq(`${dest} = void 0`),
                        seq(`break`)]))];
            };
            if (knownArray) {
                return kKnownArray();
            } else {
                return [`${dest} = void 0`,
                        seq(`if (_.Array.isArray(${src})) `, ctx.block(kKnownArray))];
            }
        }
        case 'setof':
            return [`${dest} = void 0`,
                    seq(`if (_.Set.isSet<_embedded>(${src})) `, ctx.block(() => {
                        const v = ctx.gentempname();
                        return [
                            seq(`${dest} = new _.KeyedSet()`),
                            seq(`for (const ${v} of ${src}) `, ctx.block(() => [
                                ... converterFor(ctx, M.anonymousSimplePattern(p.pattern), v, vv =>
                                    [`${dest}.add(${vv})`, `continue`]),
                                seq(`${dest} = void 0`),
                                seq(`break`)]))];
                    }))];
        case 'dictof':
            return [`${dest} = void 0`,
                    seq(`if (_.Dictionary.isDictionary<_embedded>(${src})) `, ctx.block(() => {
                        const v = ctx.gentempname();
                        const k = ctx.gentempname();
                        return [
                            seq(`${dest} = new _.KeyedDictionary()`),
                            // Both key and value must convert for the entry to be kept.
                            seq(`for (const [${k}, ${v}] of ${src}) `, ctx.block(() => [
                                ... converterFor(ctx, M.anonymousSimplePattern(p.key), k, kk =>
                                    converterFor(ctx, M.anonymousSimplePattern(p.value), v, vv =>
                                        [`${dest}.set(${kk}, ${vv})`, `continue`])),
                                seq(`${dest} = void 0`),
                                seq(`break`)]))];
                    }))];
        case 'Ref':
            // Delegate to the referenced definition's generated `toX`
            // function, importing its module when it lives elsewhere.
            return ctx.mod.lookup(p.value,
                (_p, _t) => [`${dest} = to${p.value.name.description!}(${src})`],
                (modId, modPath, _p, _t) => {
                    ctx.mod.imports.add([modId, modPath]);
                    return [`${dest} = ${modId}.to${p.value.name.description!}(${src})`];
                });
        default:
            // Exhaustiveness check: compile error if a variant is unhandled.
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
||||
|
||||
// Emit guarded statements matching a compound pattern against `src`;
// `ks` continues once every sub-pattern has matched.
function converterForCompound(
    ctx: FunctionContext,
    p: M.CompoundPattern,
    src: string,
    knownArray: boolean,
    ks: () => Item[]): Item[]
{
    switch (p._variant) {
        case 'rec':
            // Match the label first, then the fields; the isRecord guard
            // lets the fields conversion run with knownArray = true.
            return [seq(`if (_.Record.isRecord<_.Value<_embedded>, _.Tuple<_.Value<_embedded>>, _embedded>(${src})) `, ctx.block(() =>
                converterFor(ctx, p.label, `${src}.label`, () =>
                    converterFor(ctx, p.fields, src, ks, true))))];
        case 'tuple':
            return converterForTuple(ctx, p.patterns, src, knownArray, void 0, ks);
        case 'tuplePrefix':
            return converterForTuple(ctx, p.fixed, src, knownArray, p.variable, ks);
        case 'dict': {
            const entries = Array.from(p.entries);
            // Fetch each required key into a temp; a missing key (get()
            // returning undefined) stops the chain.
            function loop(i: number): Item[] {
                if (i < entries.length) {
                    const [k, n] = entries[i];
                    const tmpSrc = ctx.gentemp();
                    return [seq(`if ((${tmpSrc} = ${src}.get(${ctx.mod.literal(k)})) !== void 0) `,
                                ctx.block(() =>
                                    converterFor(
                                        ctx,
                                        M.promoteNamedSimplePattern(n),
                                        tmpSrc,
                                        () => loop(i + 1))))];
                } else {
                    return ks();
                }
            }
            return [seq(`if (_.Dictionary.isDictionary<_embedded>(${src})) `, ctx.block(() => loop(0)))];
        }
        default:
            // Exhaustiveness check: compile error if a variant is unhandled.
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
|
@ -0,0 +1,50 @@
|
|||
import * as M from '../meta';
|
||||
import { block, braces, Item, keyvalue, parens, seq } from "./block";
|
||||
import { FieldType, SimpleType, Type } from "./type";
|
||||
import { renderType } from "./rendertype";
|
||||
import { ModuleContext } from './context';
|
||||
|
||||
// Generate the exported constructor function for a definition (or one
// of its union variants): derives the formal parameter list from the
// argument type and emits the returned initializer expression.
export function genConstructor(
    mod: ModuleContext,
    name: string,
    variant: string | undefined,
    arg: SimpleType,
    resultType: Type,
    resultTypeItem: Item,
): Item {
    const formals: Array<[string, FieldType]> = [];
    let simpleValue = false;

    // Unit-typed fields carry no information, so they take no parameter.
    function examine(t: FieldType, name: string): void {
        if (t.kind !== 'unit') {
            formals.push([name, t]);
        }
    }

    if (arg.kind === 'record') {
        arg.fields.forEach(examine);
    } else {
        examine(arg, 'value');
        // A non-record argument without a variant tag passes straight through.
        simpleValue = variant === void 0;
    }

    const initializers: Item[] = (variant !== void 0)
        ? [keyvalue('_variant', JSON.stringify(variant))]
        : [];
    formals.forEach(([n, _t]) => initializers.push(seq(JSON.stringify(n), ': ', M.jsId(n))));

    // Two or more parameters become a destructured options object;
    // a single parameter stays positional.
    const declArgs: Array<Item> = (formals.length > 1)
        ? [seq(braces(...formals.map(f => M.jsId(f[0]))), ': ',
               braces(...formals.map(f => seq(M.jsId(f[0]), ': ', renderType(mod, f[1])))))]
        : formals.map(f => seq(M.jsId(f[0]), ': ', renderType(mod, f[1])));

    return seq(`export function ${M.jsId(name)}`, mod.genericParametersFor(resultType),
        parens(... declArgs),
        ': ', resultTypeItem, ' ', block(
            seq(`return `,
                ((arg.kind === 'unit' && initializers.length === 0)
                    ? 'null'
                    : (simpleValue
                        ? 'value'
                        : braces(... initializers))))));
}
|
|
@ -0,0 +1,97 @@
|
|||
import * as M from "../meta";
|
||||
import { ANY_TYPE, FieldType, FieldMap, SimpleType, Type } from "./type";
|
||||
|
||||
export type RefResolver = (ref: M.Ref) => FieldType;
|
||||
|
||||
// TypeScript type corresponding to a schema definition.
export function typeForDefinition(resolver: RefResolver, d: M.Definition): Type {
    switch (d._variant) {
        case 'or':
            // Alternation: a discriminated union keyed by variant label.
            return Type.union(
                new Map([d.pattern0, d.pattern1, ... d.patternN].map(a =>
                    [a.variantLabel, typeFor(resolver, a.pattern)])));
        case 'and':
            // Conjunction: a single record merging every conjunct's fields.
            return typeForIntersection(resolver, [d.pattern0, d.pattern1, ... d.patternN]);
        case 'Pattern':
            return typeFor(resolver, d.value);
    }
}
|
||||
|
||||
export function typeForIntersection(resolver: RefResolver, ps: M.NamedPattern[]): SimpleType {
|
||||
const fs = new Map();
|
||||
ps.forEach(p => gatherFields(fs, resolver, p));
|
||||
return fs.size > 0 ? Type.record(fs) : Type.unit();
|
||||
}
|
||||
|
||||
export function typeFor(resolver: RefResolver, p: M.Pattern): SimpleType {
|
||||
if (p._variant === 'SimplePattern') {
|
||||
return simpleType(resolver, p.value);
|
||||
} else {
|
||||
return typeForIntersection(resolver, [M.NamedPattern.anonymous(p)]);
|
||||
}
|
||||
}
|
||||
|
||||
// TypeScript type corresponding to a simple schema pattern.
export function simpleType(resolver: RefResolver, p: M.SimplePattern): FieldType {
    switch (p._variant) {
        case 'any':
            return ANY_TYPE;
        case 'atom':
            // NOTE(review): this case has no `break`; it relies on the
            // inner switch covering every atomKind. A newly added atom
            // kind would fall through to the 'embedded' return below.
            switch (p.atomKind._variant) {
                case 'Boolean': return Type.ref(`boolean`, null);
                case 'Float': return Type.ref(`number`, null);
                case 'Double': return Type.ref(`number`, null);
                case 'SignedInteger': return Type.ref(`number`, null);
                case 'String': return Type.ref(`string`, null);
                case 'ByteString': return Type.ref(`_.Bytes`, null);
                case 'Symbol': return Type.ref(`symbol`, null);
            }
        case 'embedded':
            return Type.ref(`_embedded`, null);
        case 'lit':
            // A literal admits exactly one value, hence the unit type.
            return Type.unit();
        case 'seqof':
            return Type.array(simpleType(resolver, p.pattern));
        case 'setof':
            return Type.set(simpleType(resolver, p.pattern));
        case 'dictof':
            return Type.dictionary(simpleType(resolver, p.key), simpleType(resolver, p.value));
        case 'Ref':
            return resolver(p.value);
        default:
            // Exhaustiveness check: compile error if a variant is unhandled.
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
||||
|
||||
// Accumulate into `fs` the named fields appearing anywhere within a
// compound pattern.
function compoundFields(fs: FieldMap, resolver: RefResolver, p: M.CompoundPattern): void {
    switch (p._variant) {
        case 'rec':
            gatherFields(fs, resolver, p.label);
            gatherFields(fs, resolver, p.fields);
            break;
        case 'tuple':
            p.patterns.forEach(pp => gatherFields(fs, resolver, pp));
            break;
        case 'tuplePrefix':
            p.fixed.forEach(pp => gatherFields(fs, resolver, pp));
            gatherFields(fs, resolver, M.promoteNamedSimplePattern(p.variable));
            break;
        case 'dict':
            // Only entry patterns can bind names; the keys are literals.
            p.entries.forEach((n, _k) =>
                gatherFields(fs, resolver, M.promoteNamedSimplePattern(n)));
            break;
        default:
            // Exhaustiveness check: compile error if a variant is unhandled.
            ((_p: never) => {})(p);
            throw new Error("Unreachable");
    }
}
|
||||
|
||||
// Record a named pattern's binding in `fs` (skipping information-free,
// unit-typed bindings); recurse into anonymous compound patterns.
function gatherFields(fs: FieldMap, resolver: RefResolver, n: M.NamedPattern): void {
    if (n._variant === 'named') {
        const t = simpleType(resolver, n.value.pattern);
        if (t.kind !== 'unit') {
            fs.set(n.value.name.description!, t);
        }
    } else if (n.value._variant === 'CompoundPattern') {
        compoundFields(fs, resolver, n.value.value);
    }
}
|
|
@ -0,0 +1,129 @@
|
|||
import * as M from '../meta';
|
||||
import { block, brackets, Item, parens, seq } from './block';
|
||||
import { FunctionContext } from "./context";
|
||||
|
||||
// Emit the body of a generated `fromX` function: statements converting
// the typed representation in `src` back into a Preserves value.
export function unconverterForDefinition(
    ctx: FunctionContext,
    def: M.Definition,
    src: string): Item[]
{
    switch (def._variant) {
        case 'or':
            // Dispatch on the `_variant` discriminant; simple-pattern
            // variants store their payload in a `value` field.
            return [seq(`switch (${src}._variant) `, block(
                ... [def.pattern0, def.pattern1, ... def.patternN].map(p =>
                    seq(`case `, JSON.stringify(p.variantLabel), `: `, ctx.block(() => {
                        const hasValueField = p.pattern._variant === 'SimplePattern';
                        return [seq(`return `, unconverterFor(
                            ctx, p.pattern, hasValueField ? `${src}.value` : src))];
                    })))))];
        case 'and':
            // Merge the unconversions of every conjunct; anonymous simple
            // patterns are skipped (they bound no fields to unconvert).
            return [seq(`return _.merge`, parens(
                `(a, b) => (a === b) ? a : void 0`,
                ... [def.pattern0, def.pattern1, ... def.patternN].flatMap(p => {
                    if (p._variant === 'anonymous' && p.value._variant === 'SimplePattern') {
                        return [];
                    } else {
                        return [unconverterForNamed(ctx, p, src)];
                    }
                })))];
        case 'Pattern':
            return [seq(`return `, unconverterFor(ctx, def.value, `${src}`))];
    }
}
|
||||
|
||||
// Emit an expression converting the typed representation in `src` back
// into a Preserves value, according to pattern `p`. The inner IIFEs
// narrow `p.value` so each switch stays exhaustively checked.
function unconverterFor(ctx: FunctionContext, p: M.Pattern, src: string): Item {
    switch (p._variant) {
        case 'SimplePattern':
            return ((p: M.SimplePattern) => {
                switch (p._variant) {
                    case 'any':
                        return `${src}`;
                    case 'atom':
                        switch (p.atomKind._variant) {
                            // Floats were unwrapped to plain numbers by the
                            // converter, so rewrap them here.
                            case 'Float': return `_.Single(${src})`;
                            case 'Double': return `_.Double(${src})`;
                            default: return `${src}`;
                        }
                    case 'lit':
                        // Literals have no runtime field; emit the interned constant.
                        return ctx.mod.literal(p.value);
                    case 'embedded':
                        return `_.embed(${src})`;
                    case 'seqof':
                        return seq(`${src}.map(v => `,
                            unconverterFor(ctx, M.Pattern.SimplePattern(p.pattern), 'v'),
                            `)`);
                    case 'setof':
                        return seq(`new _.Set<_embedded>`, parens(
                            `_.Array.from(${src}.values()).map(v => `,
                            unconverterFor(ctx, M.Pattern.SimplePattern(p.pattern), 'v'),
                            `)`));
                    case 'dictof':
                        return seq(`new _.Dictionary<_embedded>`, parens(seq(
                            `_.Array.from(${src}.entries()).map(([k, v]) => `,
                            brackets(
                                unconverterFor(ctx, M.Pattern.SimplePattern(p.key), 'k'),
                                unconverterFor(ctx, M.Pattern.SimplePattern(p.value), 'v')),
                            `)`)));
                    case 'Ref':
                        // Delegate to the referenced definition's generated
                        // `fromX`, importing its module when it lives elsewhere.
                        return ctx.mod.lookup(p.value,
                            (_p, _t) => `from${p.value.name.description!}${ctx.mod.genericArgs()}(${src})`,
                            (modId, modPath, _p, _t) => {
                                ctx.mod.imports.add([modId, modPath]);
                                return `${modId}.from${p.value.name.description!}${ctx.mod.genericArgs()}(${src})`;
                            });
                }
            })(p.value);
        case 'CompoundPattern':
            return ((p: M.CompoundPattern) => {
                switch (p._variant) {
                    case 'rec':
                        return seq(`_.Record`, parens(
                            unconverterForNamed(ctx, p.label, src),
                            unconverterForNamed(ctx, p.fields, src)));
                    case 'tuple':
                        return brackets(... p.patterns.map(pp =>
                            unconverterForNamed(ctx, pp, src)));
                    case 'tuplePrefix': {
                        const varExp =
                            unconverterForNamed(ctx, M.promoteNamedSimplePattern(p.variable), src);
                        if (p.fixed.length === 0) {
                            return varExp;
                        } else {
                            // Fixed prefix followed by a spread of the tail.
                            return brackets(
                                ... p.fixed.map(pp => unconverterForNamed(ctx, pp, src)),
                                seq(`... `, varExp));
                        }
                    }
                    case 'dict':
                        return seq(`new _.Dictionary<_embedded>`, parens(
                            brackets(... Array.from(p.entries.entries()).map(([k, n]) =>
                                brackets(
                                    ctx.mod.literal(k),
                                    unconverterForNamedSimple(ctx, n, src))))));
                }
            })(p.value);
    }
}
|
||||
|
||||
function stepSource(src: string, key: string): string
|
||||
{
|
||||
return `${src}[${JSON.stringify(key)}]`;
|
||||
}
|
||||
|
||||
// Unconvert a named pattern: named bindings read their field from
// `src`; anonymous patterns unconvert `src` directly.
function unconverterForNamed(ctx: FunctionContext, p: M.NamedPattern, src: string): Item {
    if (p._variant === 'named') {
        const steppedSrc = stepSource(src, p.value.name.description!);
        return unconverterFor(ctx, M.Pattern.SimplePattern(p.value.pattern), steppedSrc);
    } else {
        return unconverterFor(ctx, p.value, src);
    }
}
|
||||
|
||||
// Like unconverterForNamed, but for patterns statically known to be
// simple (so the anonymous branch wraps into a full Pattern first).
function unconverterForNamedSimple(ctx: FunctionContext, p: M.NamedSimplePattern, src: string): Item {
    if (p._variant === 'named') {
        const steppedSrc = stepSource(src, p.value.name.description!);
        return unconverterFor(ctx, M.Pattern.SimplePattern(p.value.pattern), steppedSrc);
    } else {
        return unconverterFor(ctx, M.Pattern.SimplePattern(p.value), src);
    }
}
|
|
@ -0,0 +1,68 @@
|
|||
/** True when `s` is in the reserved-word set below and so cannot be
 * used directly as an identifier in generated code. */
export function isJsKeyword(s: string): boolean {
    return JS_KEYWORDS.has(s);
}
|
||||
|
||||
export const JS_KEYWORDS = new Set([
|
||||
'abstract',
|
||||
'await',
|
||||
'boolean',
|
||||
'break',
|
||||
'byte',
|
||||
'case',
|
||||
'catch',
|
||||
'char',
|
||||
'class',
|
||||
'const',
|
||||
'continue',
|
||||
'debugger',
|
||||
'default',
|
||||
'delete',
|
||||
'do',
|
||||
'double',
|
||||
'else',
|
||||
'enum',
|
||||
'export',
|
||||
'extends',
|
||||
'false',
|
||||
'final',
|
||||
'finally',
|
||||
'float',
|
||||
'for',
|
||||
'function',
|
||||
'goto',
|
||||
'if',
|
||||
'implements',
|
||||
'import',
|
||||
'in',
|
||||
'instanceof',
|
||||
'int',
|
||||
'interface',
|
||||
'let',
|
||||
'long',
|
||||
'native',
|
||||
'new',
|
||||
'null',
|
||||
'package',
|
||||
'private',
|
||||
'protected',
|
||||
'public',
|
||||
'return',
|
||||
'short',
|
||||
'static',
|
||||
'super',
|
||||
'switch',
|
||||
'synchronized',
|
||||
'this',
|
||||
'throw',
|
||||
'throws',
|
||||
'transient',
|
||||
'true',
|
||||
'try',
|
||||
'typeof',
|
||||
'var',
|
||||
'void',
|
||||
'volatile',
|
||||
'while',
|
||||
'with',
|
||||
'yield',
|
||||
]);
|
|
@ -0,0 +1,60 @@
|
|||
import { SimpleType, Type } from "./type";
|
||||
import { anglebrackets, braces, Item, keyvalue, opseq, seq } from "./block";
|
||||
import { ModuleContext } from "./context";
|
||||
|
||||
export function variantInitFor(variantName: string | undefined) : Item[] {
|
||||
return variantName === void 0 ? [] : [variantFor(variantName)];
|
||||
}
|
||||
|
||||
/** The `_variant: "<name>"` key/value initializer for a union member. */
export function variantFor(variantName: string): Item {
    return keyvalue('_variant', JSON.stringify(variantName));
}
|
||||
|
||||
// Render one member of a discriminated union as an object type carrying
// a `_variant` tag plus its payload fields.
export function renderVariant(ctxt: ModuleContext, [variantName, t]: [string, SimpleType]): Item {
    let fields: Item[];
    switch (t.kind) {
        case 'unit':
            // Tag only; no payload.
            fields = [];
            break;
        case 'ref':
        case 'set':
        case 'dictionary':
        case 'array':
            // A single payload, stored under a `value` field.
            fields = [keyvalue('value', renderType(ctxt, t))];
            break;
        case 'record':
            fields = Array.from(t.fields).map(([nn, tt]) => keyvalue(nn, renderType(ctxt, tt)));
            break;
        default:
            // Exhaustiveness check: compile error if a kind is unhandled.
            ((_: never) => {})(t);
            throw new Error("Unreachable");
    }
    return braces(variantFor(variantName), ... fields);
}
|
||||
|
||||
// Render a Type from the model as TypeScript source.
export function renderType(ctxt: ModuleContext, t: Type): Item {
    switch (t.kind) {
        case 'union': return opseq('never', ' | ', ...
            Array.from(t.variants).flatMap(entry => renderVariant(ctxt, entry)));
        case 'unit': return 'null';
        case 'ref':
            if (t.ref === null && t.typeName === '_embedded') {
                // The type parameter itself takes no generic arguments.
                return t.typeName;
            } else {
                return seq(t.typeName, ctxt.genericArgsFor(t));
            }
        case 'set': return seq('_.KeyedSet', anglebrackets(
            renderType(ctxt, t.type),
            '_embedded'));
        case 'dictionary': return seq('_.KeyedDictionary', anglebrackets(
            renderType(ctxt, t.key),
            renderType(ctxt, t.value),
            '_embedded'));
        case 'array': return seq('Array', anglebrackets(renderType(ctxt, t.type)));
        case 'record': return braces(... Array.from(t.fields).map(([nn, tt]) =>
            keyvalue(nn, renderType(ctxt, tt))));
        default:
            // Exhaustiveness check: compile error if a kind is unhandled.
            ((_: never) => {})(t);
            throw new Error("Unreachable");
    }
}
|
|
@ -0,0 +1,37 @@
|
|||
import * as M from '../meta';
|
||||
|
||||
// Model of the TypeScript types generated from schema definitions.

// A full type: either a discriminated union of variants, or simple.
export type Type =
    | { kind: 'union', variants: VariantMap } // zero: never
    | SimpleType

export type SimpleType = FieldType | RecordType

// Types that may appear as record fields.
export type FieldType =
    | { kind: 'unit' }
    | { kind: 'array', type: FieldType }
    | { kind: 'set', type: FieldType }
    | { kind: 'dictionary', key: FieldType, value: FieldType }
    | RefType

export type RefType =
    | { kind: 'ref', typeName: string, ref: M.Ref | null } // ref === null for base types

export type RecordType =
    | { kind: 'record', fields: FieldMap }

// Union members keyed by variant label.
export type VariantMap = Map<string, SimpleType>;
// Record fields keyed by field name.
export type FieldMap = Map<string, FieldType>;
|
||||
|
||||
export namespace Type {
|
||||
export const union = (variants: VariantMap): Type => ({ kind: 'union', variants });
|
||||
export const unit = (): FieldType => ({ kind: 'unit' });
|
||||
export const ref = (typeName: string, ref: M.Ref | null): RefType => (
|
||||
{ kind: 'ref', typeName, ref });
|
||||
export const array = (type: FieldType): FieldType => ({ kind: 'array', type });
|
||||
export const set = (type: FieldType): FieldType => ({ kind: 'set', type });
|
||||
export const dictionary = (key: FieldType, value: FieldType): FieldType => (
|
||||
{ kind: 'dictionary', key, value });
|
||||
export const record = (fields: FieldMap): RecordType => ({ kind: 'record', fields });
|
||||
}
|
||||
|
||||
export const ANY_TYPE: FieldType = Type.ref('_.Value', null);
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue