Compare commits
30 Commits
jazz-brows
...
jazz-run@0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b675249960 | ||
|
|
05198e4181 | ||
|
|
ec9cb40fa4 | ||
|
|
fae9b521b8 | ||
|
|
ec1e2e4539 | ||
|
|
9550dcd6e7 | ||
|
|
4547525579 | ||
|
|
856ba0c1fa | ||
|
|
29e05c4ad4 | ||
|
|
65719f21a3 | ||
|
|
05ff90c3c4 | ||
|
|
07408970bd | ||
|
|
4ba3ea6b4e | ||
|
|
c30fb098fe | ||
|
|
a703bc3102 | ||
|
|
18dc96c7b1 | ||
|
|
e9e7f45e02 | ||
|
|
49fb6311ad | ||
|
|
cdc5cbd6d6 | ||
|
|
f55097c480 | ||
|
|
e9695fa2eb | ||
|
|
0c11110567 | ||
|
|
f2db858221 | ||
|
|
a362cbba51 | ||
|
|
39c2586d3b | ||
|
|
e5eed7bd35 | ||
|
|
39ae497153 | ||
|
|
e0b5df7f9e | ||
|
|
54b2907f08 | ||
|
|
f3e0b1ed74 |
@@ -1,5 +1,22 @@
|
||||
# chat-rn-expo-clerk
|
||||
|
||||
## 1.0.105
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-expo@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
- jazz-react-native-media-images@0.13.13
|
||||
|
||||
## 1.0.104
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-expo@0.13.12
|
||||
- jazz-react-native-media-images@0.13.12
|
||||
|
||||
## 1.0.103
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "chat-rn-expo-clerk",
|
||||
"main": "index.js",
|
||||
"version": "1.0.103",
|
||||
"version": "1.0.105",
|
||||
"scripts": {
|
||||
"build": "expo export -p ios",
|
||||
"start": "expo start",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# chat-rn-expo
|
||||
|
||||
## 1.0.92
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-expo@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 1.0.91
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-expo@0.13.12
|
||||
|
||||
## 1.0.90
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "chat-rn-expo",
|
||||
"version": "1.0.90",
|
||||
"version": "1.0.92",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"build": "expo export -p ios",
|
||||
|
||||
@@ -1,5 +1,26 @@
|
||||
# chat-rn
|
||||
|
||||
## 1.0.100
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ec9cb40]
|
||||
- cojson@0.13.13
|
||||
- cojson-transport-ws@0.13.13
|
||||
- jazz-react-native@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 1.0.99
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- Updated dependencies [65719f2]
|
||||
- jazz-tools@0.13.12
|
||||
- cojson@0.13.12
|
||||
- jazz-react-native@0.13.12
|
||||
- cojson-transport-ws@0.13.12
|
||||
|
||||
## 1.0.98
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "chat-rn",
|
||||
"version": "1.0.98",
|
||||
"version": "1.0.100",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"android": "react-native run-android",
|
||||
|
||||
@@ -1,5 +1,23 @@
|
||||
# chat-vue
|
||||
|
||||
## 0.0.84
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
- jazz-vue@0.13.13
|
||||
|
||||
## 0.0.83
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- Updated dependencies [29e05c4]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-browser@0.13.12
|
||||
- jazz-vue@0.13.12
|
||||
|
||||
## 0.0.82
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "chat-vue",
|
||||
"version": "0.0.82",
|
||||
"version": "0.0.84",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# jazz-example-chat
|
||||
|
||||
## 0.0.182
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-inspector@0.13.13
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.181
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-inspector@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.180
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-example-chat",
|
||||
"private": true,
|
||||
"version": "0.0.180",
|
||||
"version": "0.0.182",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# minimal-auth-clerk
|
||||
|
||||
## 0.0.81
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-react-auth-clerk@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.80
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
- jazz-react-auth-clerk@0.13.12
|
||||
|
||||
## 0.0.79
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "clerk",
|
||||
"private": true,
|
||||
"version": "0.0.79",
|
||||
"version": "0.0.81",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# file-share-svelte
|
||||
|
||||
## 0.0.64
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-svelte@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.63
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-svelte@0.13.12
|
||||
|
||||
## 0.0.62
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "file-share-svelte",
|
||||
"version": "0.0.62",
|
||||
"version": "0.0.64",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# jazz-tailwind-demo-auth-starter
|
||||
|
||||
## 0.0.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-inspector@0.13.13
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-inspector@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "filestream",
|
||||
"private": true,
|
||||
"version": "0.0.19",
|
||||
"version": "0.0.21",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# form
|
||||
|
||||
## 0.1.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.1.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.1.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "form",
|
||||
"private": true,
|
||||
"version": "0.1.20",
|
||||
"version": "0.1.22",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# image-upload
|
||||
|
||||
## 0.0.78
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.77
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.76
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "image-upload",
|
||||
"private": true,
|
||||
"version": "0.0.76",
|
||||
"version": "0.0.78",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,23 @@
|
||||
# jazz-example-inspector
|
||||
|
||||
## 0.0.132
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ec9cb40]
|
||||
- cojson@0.13.13
|
||||
- cojson-transport-ws@0.13.13
|
||||
- jazz-inspector@0.13.13
|
||||
|
||||
## 0.0.131
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [65719f2]
|
||||
- cojson@0.13.12
|
||||
- jazz-inspector@0.13.12
|
||||
- cojson-transport-ws@0.13.12
|
||||
|
||||
## 0.0.130
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-inspector-app",
|
||||
"private": true,
|
||||
"version": "0.0.130",
|
||||
"version": "0.0.132",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# multi-cursors
|
||||
|
||||
## 0.0.74
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.73
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.72
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "multi-cursors",
|
||||
"private": true,
|
||||
"version": "0.0.72",
|
||||
"version": "0.0.74",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# multiauth
|
||||
|
||||
## 0.0.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-react-auth-clerk@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
- jazz-react-auth-clerk@0.13.12
|
||||
|
||||
## 0.0.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "multiauth",
|
||||
"private": true,
|
||||
"version": "0.0.20",
|
||||
"version": "0.0.22",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# jazz-example-musicplayer
|
||||
|
||||
## 0.0.103
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-inspector@0.13.13
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.102
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-inspector@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.101
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-example-music-player",
|
||||
"private": true,
|
||||
"version": "0.0.101",
|
||||
"version": "0.0.103",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# organization
|
||||
|
||||
## 0.0.74
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.73
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.72
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "organization",
|
||||
"private": true,
|
||||
"version": "0.0.72",
|
||||
"version": "0.0.74",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# passkey-svelte
|
||||
|
||||
## 0.0.68
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-svelte@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.67
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-svelte@0.13.12
|
||||
|
||||
## 0.0.66
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "passkey-svelte",
|
||||
"version": "0.0.66",
|
||||
"version": "0.0.68",
|
||||
"type": "module",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# minimal-auth-passkey
|
||||
|
||||
## 0.0.79
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.78
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.77
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "passkey",
|
||||
"private": true,
|
||||
"version": "0.0.77",
|
||||
"version": "0.0.79",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# passphrase
|
||||
|
||||
## 0.0.76
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.75
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.74
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "passphrase",
|
||||
"private": true,
|
||||
"version": "0.0.74",
|
||||
"version": "0.0.76",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# jazz-password-manager
|
||||
|
||||
## 0.0.100
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.99
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.98
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-password-manager",
|
||||
"private": true,
|
||||
"version": "0.0.98",
|
||||
"version": "0.0.100",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# jazz-example-pets
|
||||
|
||||
## 0.0.198
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.197
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.196
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-example-pets",
|
||||
"private": true,
|
||||
"version": "0.0.196",
|
||||
"version": "0.0.198",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# reactions
|
||||
|
||||
## 0.0.78
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.77
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.76
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "reactions",
|
||||
"private": true,
|
||||
"version": "0.0.76",
|
||||
"version": "0.0.78",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# richtext
|
||||
|
||||
## 0.0.68
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
- jazz-richtext-prosemirror@0.1.2
|
||||
|
||||
## 0.0.67
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
- jazz-richtext-prosemirror@0.1.1
|
||||
|
||||
## 0.0.66
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "richtext",
|
||||
"private": true,
|
||||
"version": "0.0.66",
|
||||
"version": "0.0.68",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,23 @@
|
||||
# todo-vue
|
||||
|
||||
## 0.0.82
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
- jazz-vue@0.13.13
|
||||
|
||||
## 0.0.81
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- Updated dependencies [29e05c4]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-browser@0.13.12
|
||||
- jazz-vue@0.13.12
|
||||
|
||||
## 0.0.80
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "todo-vue",
|
||||
"version": "0.0.80",
|
||||
"version": "0.0.82",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# jazz-example-todo
|
||||
|
||||
## 0.0.197
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.196
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.195
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-example-todo",
|
||||
"private": true,
|
||||
"version": "0.0.195",
|
||||
"version": "0.0.197",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# version-history
|
||||
|
||||
## 0.0.76
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-inspector@0.13.13
|
||||
- jazz-react@0.13.13
|
||||
- jazz-tools@0.13.13
|
||||
|
||||
## 0.0.75
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4547525]
|
||||
- jazz-tools@0.13.12
|
||||
- jazz-inspector@0.13.12
|
||||
- jazz-react@0.13.12
|
||||
|
||||
## 0.0.74
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "version-history",
|
||||
"private": true,
|
||||
"version": "0.0.74",
|
||||
"version": "0.0.76",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,21 @@
|
||||
# cojson-storage-indexeddb
|
||||
|
||||
## 0.13.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ec9cb40]
|
||||
- cojson@0.13.13
|
||||
- cojson-storage@0.13.13
|
||||
|
||||
## 0.13.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [65719f2]
|
||||
- cojson@0.13.12
|
||||
- cojson-storage@0.13.12
|
||||
|
||||
## 0.13.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "cojson-storage-indexeddb",
|
||||
"version": "0.13.11",
|
||||
"version": "0.13.13",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"types": "dist/index.d.ts",
|
||||
|
||||
@@ -1,5 +1,21 @@
|
||||
# cojson-storage-sqlite
|
||||
|
||||
## 0.13.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ec9cb40]
|
||||
- cojson@0.13.13
|
||||
- cojson-storage@0.13.13
|
||||
|
||||
## 0.13.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [65719f2]
|
||||
- cojson@0.13.12
|
||||
- cojson-storage@0.13.12
|
||||
|
||||
## 0.13.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"name": "cojson-storage-sqlite",
|
||||
"type": "module",
|
||||
"version": "0.13.11",
|
||||
"version": "0.13.13",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^11.7.0",
|
||||
"cojson": "workspace:0.13.11",
|
||||
"cojson": "workspace:0.13.13",
|
||||
"cojson-storage": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -1,5 +1,19 @@
|
||||
# cojson-storage
|
||||
|
||||
## 0.13.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ec9cb40]
|
||||
- cojson@0.13.13
|
||||
|
||||
## 0.13.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [65719f2]
|
||||
- cojson@0.13.12
|
||||
|
||||
## 0.13.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "cojson-storage",
|
||||
"version": "0.13.11",
|
||||
"version": "0.13.13",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"types": "dist/index.d.ts",
|
||||
|
||||
@@ -1,5 +1,19 @@
|
||||
# cojson-transport-nodejs-ws
|
||||
|
||||
## 0.13.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ec9cb40]
|
||||
- cojson@0.13.13
|
||||
|
||||
## 0.13.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [65719f2]
|
||||
- cojson@0.13.12
|
||||
|
||||
## 0.13.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "cojson-transport-ws",
|
||||
"type": "module",
|
||||
"version": "0.13.11",
|
||||
"version": "0.13.13",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,5 +1,17 @@
|
||||
# cojson
|
||||
|
||||
## 0.13.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- ec9cb40: Remove .every() call on iterator to fix compat issues with React Native
|
||||
|
||||
## 0.13.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 65719f2: Simplified CoValue loading state management
|
||||
|
||||
## 0.13.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -25,7 +25,7 @@
|
||||
},
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"version": "0.13.11",
|
||||
"version": "0.13.13",
|
||||
"devDependencies": {
|
||||
"@opentelemetry/sdk-metrics": "^2.0.0",
|
||||
"typescript": "catalog:"
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { CoValueCore } from "./coValueCore.js";
|
||||
import { CoValueState } from "./coValueState.js";
|
||||
import { RawCoID } from "./ids.js";
|
||||
import { PeerID } from "./sync.js";
|
||||
|
||||
export class CoValuesStore {
|
||||
coValues = new Map<RawCoID, CoValueState>();
|
||||
@@ -9,19 +10,21 @@ export class CoValuesStore {
|
||||
let entry = this.coValues.get(id);
|
||||
|
||||
if (!entry) {
|
||||
entry = CoValueState.Unknown(id);
|
||||
entry = new CoValueState(id);
|
||||
this.coValues.set(id, entry);
|
||||
}
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
setAsAvailable(id: RawCoID, coValue: CoValueCore) {
|
||||
markAsAvailable(id: RawCoID, coValue: CoValueCore, fromPeerId: PeerID) {
|
||||
const entry = this.get(id);
|
||||
entry.dispatch({
|
||||
type: "available",
|
||||
coValue,
|
||||
});
|
||||
entry.markAvailable(coValue, fromPeerId);
|
||||
}
|
||||
|
||||
internalMarkMagicallyAvailable(id: RawCoID, coValue: CoValueCore) {
|
||||
const entry = this.get(id);
|
||||
entry.internalMarkMagicallyAvailable(coValue);
|
||||
}
|
||||
|
||||
getEntries() {
|
||||
|
||||
@@ -122,8 +122,6 @@ export class PeerState {
|
||||
}
|
||||
}
|
||||
|
||||
readonly erroredCoValues: Map<RawCoID, TryAddTransactionsError> = new Map();
|
||||
|
||||
get id() {
|
||||
return this.peer.id;
|
||||
}
|
||||
|
||||
@@ -116,11 +116,11 @@ export class SyncStateManager {
|
||||
|
||||
const entry = this.syncManager.local.coValuesStore.get(id);
|
||||
|
||||
if (entry.state.type !== "available") {
|
||||
if (!entry.isAvailable()) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const coValue = entry.state.coValue;
|
||||
const coValue = entry.core;
|
||||
const coValueSessions = coValue.knownState().sessions;
|
||||
|
||||
return {
|
||||
|
||||
@@ -136,8 +136,8 @@ export class CoValueCore {
|
||||
const groupId = header.ruleset.group;
|
||||
const entry = this.node.coValuesStore.get(groupId);
|
||||
|
||||
if (entry.state.type === "available") {
|
||||
this.groupInvalidationSubscription = entry.state.coValue.subscribe(
|
||||
if (entry.isAvailable()) {
|
||||
this.groupInvalidationSubscription = entry.core.subscribe(
|
||||
(_groupUpdate) => {
|
||||
this._cachedContent = undefined;
|
||||
this.notifyUpdate("immediate");
|
||||
|
||||
@@ -1,119 +1,35 @@
|
||||
import { ValueType } from "@opentelemetry/api";
|
||||
import { UpDownCounter, metrics } from "@opentelemetry/api";
|
||||
import { PeerState } from "./PeerState.js";
|
||||
import { CoValueCore } from "./coValueCore.js";
|
||||
import { CoValueCore, TryAddTransactionsError } from "./coValueCore.js";
|
||||
import { RawCoID } from "./ids.js";
|
||||
import { logger } from "./logger.js";
|
||||
import { PeerID } from "./sync.js";
|
||||
import { PeerID, emptyKnownState } from "./sync.js";
|
||||
|
||||
export const CO_VALUE_LOADING_CONFIG = {
|
||||
MAX_RETRIES: 2,
|
||||
TIMEOUT: 30_000,
|
||||
};
|
||||
|
||||
export class CoValueUnknownState {
|
||||
type = "unknown" as const;
|
||||
}
|
||||
|
||||
export class CoValueLoadingState {
|
||||
type = "loading" as const;
|
||||
export class CoValueState {
|
||||
private peers = new Map<
|
||||
PeerID,
|
||||
ReturnType<typeof createResolvablePromise<void>>
|
||||
| { type: "unknown" | "pending" | "available" | "unavailable" }
|
||||
| {
|
||||
type: "errored";
|
||||
error: TryAddTransactionsError;
|
||||
}
|
||||
>();
|
||||
private resolveResult: (value: CoValueCore | "unavailable") => void;
|
||||
|
||||
result: Promise<CoValueCore | "unavailable">;
|
||||
core: CoValueCore | null = null;
|
||||
id: RawCoID;
|
||||
|
||||
constructor(peersIds: Iterable<PeerID>) {
|
||||
this.peers = new Map();
|
||||
|
||||
for (const peerId of peersIds) {
|
||||
this.peers.set(peerId, createResolvablePromise<void>());
|
||||
}
|
||||
|
||||
const { resolve, promise } = createResolvablePromise<
|
||||
CoValueCore | "unavailable"
|
||||
>();
|
||||
|
||||
this.result = promise;
|
||||
this.resolveResult = resolve;
|
||||
}
|
||||
|
||||
markAsUnavailable(peerId: PeerID) {
|
||||
const entry = this.peers.get(peerId);
|
||||
|
||||
if (entry) {
|
||||
entry.resolve();
|
||||
}
|
||||
|
||||
this.peers.delete(peerId);
|
||||
|
||||
// If none of the peers have the coValue, we resolve to unavailable
|
||||
if (this.peers.size === 0) {
|
||||
this.resolve("unavailable");
|
||||
}
|
||||
}
|
||||
|
||||
resolve(value: CoValueCore | "unavailable") {
|
||||
this.resolveResult(value);
|
||||
for (const entry of this.peers.values()) {
|
||||
entry.resolve();
|
||||
}
|
||||
this.peers.clear();
|
||||
}
|
||||
|
||||
// Wait for a specific peer to have a known state
|
||||
waitForPeer(peerId: PeerID) {
|
||||
const entry = this.peers.get(peerId);
|
||||
|
||||
if (!entry) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return entry.promise;
|
||||
}
|
||||
}
|
||||
|
||||
export class CoValueAvailableState {
|
||||
type = "available" as const;
|
||||
|
||||
constructor(public coValue: CoValueCore) {}
|
||||
}
|
||||
|
||||
export class CoValueUnavailableState {
|
||||
type = "unavailable" as const;
|
||||
}
|
||||
|
||||
type CoValueStateAction =
|
||||
| {
|
||||
type: "load-requested";
|
||||
peersIds: PeerID[];
|
||||
}
|
||||
| {
|
||||
type: "not-found-in-peer";
|
||||
peerId: PeerID;
|
||||
}
|
||||
| {
|
||||
type: "available";
|
||||
coValue: CoValueCore;
|
||||
};
|
||||
|
||||
type CoValueStateType =
|
||||
| CoValueUnknownState
|
||||
| CoValueLoadingState
|
||||
| CoValueAvailableState
|
||||
| CoValueUnavailableState;
|
||||
|
||||
export class CoValueState {
|
||||
promise?: Promise<CoValueCore | "unavailable">;
|
||||
private resolve?: (value: CoValueCore | "unavailable") => void;
|
||||
private listeners: Set<(state: CoValueState) => void> = new Set();
|
||||
private counter: UpDownCounter;
|
||||
|
||||
constructor(
|
||||
public id: RawCoID,
|
||||
public state: CoValueStateType,
|
||||
) {
|
||||
constructor(id: RawCoID) {
|
||||
this.id = id;
|
||||
|
||||
this.counter = metrics
|
||||
.getMeter("cojson")
|
||||
.createUpDownCounter("jazz.covalues.loaded", {
|
||||
@@ -122,128 +38,168 @@ export class CoValueState {
|
||||
valueType: ValueType.INT,
|
||||
});
|
||||
|
||||
this.counter.add(1, {
|
||||
state: this.state.type,
|
||||
});
|
||||
this.updateCounter(null);
|
||||
}
|
||||
|
||||
static Unknown(id: RawCoID) {
|
||||
return new CoValueState(id, new CoValueUnknownState());
|
||||
get highLevelState() {
|
||||
if (this.core) {
|
||||
return "available";
|
||||
} else if (this.peers.size === 0) {
|
||||
return "unknown";
|
||||
}
|
||||
|
||||
for (const peer of this.peers.values()) {
|
||||
if (peer.type === "pending") {
|
||||
return "loading";
|
||||
} else if (peer.type === "unknown") {
|
||||
return "unknown";
|
||||
}
|
||||
}
|
||||
|
||||
return "unavailable";
|
||||
}
|
||||
|
||||
static Loading(id: RawCoID, peersIds: Iterable<PeerID>) {
|
||||
return new CoValueState(id, new CoValueLoadingState(peersIds));
|
||||
isErroredInPeer(peerId: PeerID) {
|
||||
return this.peers.get(peerId)?.type === "errored";
|
||||
}
|
||||
|
||||
static Available(coValue: CoValueCore) {
|
||||
return new CoValueState(coValue.id, new CoValueAvailableState(coValue));
|
||||
isAvailable(): this is { type: "available"; core: CoValueCore } {
|
||||
return !!this.core;
|
||||
}
|
||||
|
||||
static Unavailable(id: RawCoID) {
|
||||
return new CoValueState(id, new CoValueUnavailableState());
|
||||
addListener(listener: (state: CoValueState) => void) {
|
||||
this.listeners.add(listener);
|
||||
listener(this);
|
||||
}
|
||||
|
||||
removeListener(listener: (state: CoValueState) => void) {
|
||||
this.listeners.delete(listener);
|
||||
}
|
||||
|
||||
private notifyListeners() {
|
||||
for (const listener of this.listeners) {
|
||||
listener(this);
|
||||
}
|
||||
}
|
||||
|
||||
async getCoValue() {
|
||||
if (this.state.type === "available") {
|
||||
return this.state.coValue;
|
||||
}
|
||||
if (this.state.type === "unavailable") {
|
||||
if (this.highLevelState === "unavailable") {
|
||||
return "unavailable";
|
||||
}
|
||||
|
||||
// If we don't have a resolved state we return a new promise
|
||||
// that will be resolved when the state will move to available or unavailable
|
||||
if (!this.promise) {
|
||||
const { promise, resolve } = createResolvablePromise<
|
||||
CoValueCore | "unavailable"
|
||||
>();
|
||||
return new Promise<CoValueCore>((resolve) => {
|
||||
const listener = (state: CoValueState) => {
|
||||
if (state.core) {
|
||||
resolve(state.core);
|
||||
this.removeListener(listener);
|
||||
}
|
||||
};
|
||||
|
||||
this.promise = promise;
|
||||
this.resolve = resolve;
|
||||
}
|
||||
|
||||
return this.promise;
|
||||
}
|
||||
|
||||
private moveToState(value: CoValueStateType) {
|
||||
this.counter.add(-1, {
|
||||
state: this.state.type,
|
||||
this.addListener(listener);
|
||||
});
|
||||
this.state = value;
|
||||
|
||||
this.counter.add(1, {
|
||||
state: this.state.type,
|
||||
});
|
||||
|
||||
if (!this.resolve) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the state is available we resolve the promise
|
||||
// and clear it to handle the possible transition from unavailable to available
|
||||
if (value.type === "available") {
|
||||
this.resolve(value.coValue);
|
||||
this.clearPromise();
|
||||
} else if (value.type === "unavailable") {
|
||||
this.resolve("unavailable");
|
||||
this.clearPromise();
|
||||
}
|
||||
}
|
||||
|
||||
private clearPromise() {
|
||||
this.promise = undefined;
|
||||
this.resolve = undefined;
|
||||
}
|
||||
|
||||
async loadFromPeers(peers: PeerState[]) {
|
||||
const state = this.state;
|
||||
|
||||
if (state.type === "loading" || state.type === "available") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (peers.length === 0) {
|
||||
this.moveToState(new CoValueUnavailableState());
|
||||
return;
|
||||
}
|
||||
|
||||
const doLoad = async (peersToLoadFrom: PeerState[]) => {
|
||||
const peersWithoutErrors = getPeersWithoutErrors(
|
||||
peersToLoadFrom,
|
||||
this.id,
|
||||
);
|
||||
const loadAttempt = async (peersToLoadFrom: PeerState[]) => {
|
||||
const peersToActuallyLoadFrom = [];
|
||||
for (const peer of peersToLoadFrom) {
|
||||
const currentState = this.peers.get(peer.id);
|
||||
|
||||
// If we are in the loading state we move to a new loading state
|
||||
// to reset all the loading promises
|
||||
if (
|
||||
this.state.type === "loading" ||
|
||||
this.state.type === "unknown" ||
|
||||
this.state.type === "unavailable"
|
||||
) {
|
||||
this.moveToState(
|
||||
new CoValueLoadingState(peersWithoutErrors.map((p) => p.id)),
|
||||
);
|
||||
if (currentState?.type === "available") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (currentState?.type === "errored") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
currentState?.type === "unavailable" ||
|
||||
currentState?.type === "pending"
|
||||
) {
|
||||
if (peer.shouldRetryUnavailableCoValues()) {
|
||||
this.markPending(peer.id);
|
||||
peersToActuallyLoadFrom.push(peer);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!currentState || currentState?.type === "unknown") {
|
||||
this.markPending(peer.id);
|
||||
peersToActuallyLoadFrom.push(peer);
|
||||
}
|
||||
}
|
||||
|
||||
// Assign the current state to a variable to not depend on the state changes
|
||||
// that may happen while we wait for loadCoValueFromPeers to complete
|
||||
const currentState = this.state;
|
||||
for (const peer of peersToActuallyLoadFrom) {
|
||||
if (peer.closed) {
|
||||
this.markNotFoundInPeer(peer.id);
|
||||
continue;
|
||||
}
|
||||
|
||||
// If we entered successfully the loading state, we load the coValue from the peers
|
||||
//
|
||||
// We may not enter the loading state if the coValue has become available in between
|
||||
// of the retries
|
||||
if (currentState.type === "loading") {
|
||||
await loadCoValueFromPeers(this, peersWithoutErrors);
|
||||
peer
|
||||
.pushOutgoingMessage({
|
||||
action: "load",
|
||||
...(this.core ? this.core.knownState() : emptyKnownState(this.id)),
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.warn(`Failed to push load message to peer ${peer.id}`, {
|
||||
err,
|
||||
});
|
||||
});
|
||||
|
||||
const result = await currentState.result;
|
||||
return result !== "unavailable";
|
||||
/**
|
||||
* Use a very long timeout for storage peers, because under pressure
|
||||
* they may take a long time to consume the messages queue
|
||||
*
|
||||
* TODO: Track errors on storage and do not rely on timeout
|
||||
*/
|
||||
const timeoutDuration =
|
||||
peer.role === "storage"
|
||||
? CO_VALUE_LOADING_CONFIG.TIMEOUT * 10
|
||||
: CO_VALUE_LOADING_CONFIG.TIMEOUT;
|
||||
|
||||
const waitingForPeer = new Promise<void>((resolve) => {
|
||||
const markNotFound = () => {
|
||||
if (this.peers.get(peer.id)?.type === "pending") {
|
||||
this.markNotFoundInPeer(peer.id);
|
||||
}
|
||||
};
|
||||
|
||||
const timeout = setTimeout(markNotFound, timeoutDuration);
|
||||
const removeCloseListener = peer.addCloseListener(markNotFound);
|
||||
|
||||
const listener = (state: CoValueState) => {
|
||||
const peerState = state.peers.get(peer.id);
|
||||
if (
|
||||
state.isAvailable() || // might have become available from another peer e.g. through handleNewContent
|
||||
peerState?.type === "available" ||
|
||||
peerState?.type === "errored" ||
|
||||
peerState?.type === "unavailable"
|
||||
) {
|
||||
state.removeListener(listener);
|
||||
removeCloseListener();
|
||||
clearTimeout(timeout);
|
||||
resolve();
|
||||
}
|
||||
};
|
||||
|
||||
this.addListener(listener);
|
||||
});
|
||||
|
||||
await waitingForPeer;
|
||||
}
|
||||
|
||||
return currentState.type === "available";
|
||||
};
|
||||
|
||||
await doLoad(peers);
|
||||
await loadAttempt(peers);
|
||||
|
||||
if (this.isAvailable()) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Retry loading from peers that have the retry flag enabled
|
||||
const peersWithRetry = peers.filter((p) =>
|
||||
@@ -251,129 +207,74 @@ export class CoValueState {
|
||||
);
|
||||
|
||||
if (peersWithRetry.length > 0) {
|
||||
const waitingForCoValue = new Promise<void>((resolve) => {
|
||||
const listener = (state: CoValueState) => {
|
||||
if (state.isAvailable()) {
|
||||
resolve();
|
||||
this.removeListener(listener);
|
||||
}
|
||||
};
|
||||
|
||||
this.addListener(listener);
|
||||
});
|
||||
|
||||
// We want to exit early if the coValue becomes available in between the retries
|
||||
await Promise.race([
|
||||
this.getCoValue(),
|
||||
waitingForCoValue,
|
||||
runWithRetry(
|
||||
() => doLoad(peersWithRetry),
|
||||
() => loadAttempt(peersWithRetry),
|
||||
CO_VALUE_LOADING_CONFIG.MAX_RETRIES,
|
||||
),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
// If after the retries the coValue is still loading, we consider the load failed
|
||||
if (this.state.type === "loading") {
|
||||
this.moveToState(new CoValueUnavailableState());
|
||||
private updateCounter(previousState: string | null) {
|
||||
const newState = this.highLevelState;
|
||||
|
||||
if (previousState !== newState) {
|
||||
if (previousState) {
|
||||
this.counter.add(-1, { state: previousState });
|
||||
}
|
||||
this.counter.add(1, { state: newState });
|
||||
}
|
||||
}
|
||||
|
||||
dispatch(action: CoValueStateAction) {
|
||||
const currentState = this.state;
|
||||
|
||||
switch (action.type) {
|
||||
case "available":
|
||||
if (currentState.type === "loading") {
|
||||
currentState.resolve(action.coValue);
|
||||
}
|
||||
|
||||
// It should be always possible to move to the available state
|
||||
this.moveToState(new CoValueAvailableState(action.coValue));
|
||||
|
||||
break;
|
||||
case "not-found-in-peer":
|
||||
if (currentState.type === "loading") {
|
||||
currentState.markAsUnavailable(action.peerId);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
markNotFoundInPeer(peerId: PeerID) {
|
||||
const previousState = this.highLevelState;
|
||||
this.peers.set(peerId, { type: "unavailable" });
|
||||
this.updateCounter(previousState);
|
||||
this.notifyListeners();
|
||||
}
|
||||
}
|
||||
|
||||
async function loadCoValueFromPeers(
|
||||
coValueEntry: CoValueState,
|
||||
peers: PeerState[],
|
||||
) {
|
||||
for (const peer of peers) {
|
||||
if (peer.closed) {
|
||||
coValueEntry.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: peer.id,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
// TODO: rename to "provided"
|
||||
markAvailable(coValue: CoValueCore, fromPeerId: PeerID) {
|
||||
const previousState = this.highLevelState;
|
||||
this.core = coValue;
|
||||
this.peers.set(fromPeerId, { type: "available" });
|
||||
this.updateCounter(previousState);
|
||||
this.notifyListeners();
|
||||
}
|
||||
|
||||
if (coValueEntry.state.type === "available") {
|
||||
/**
|
||||
* We don't need to wait for the message to be delivered here.
|
||||
*
|
||||
* This way when the coValue becomes available because it's cached we don't wait for the server
|
||||
* peer to consume the messages queue before moving forward.
|
||||
*/
|
||||
peer
|
||||
.pushOutgoingMessage({
|
||||
action: "load",
|
||||
...coValueEntry.state.coValue.knownState(),
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.warn(`Failed to push load message to peer ${peer.id}`, {
|
||||
err,
|
||||
});
|
||||
});
|
||||
} else {
|
||||
/**
|
||||
* We only wait for the load state to be resolved.
|
||||
*/
|
||||
peer
|
||||
.pushOutgoingMessage({
|
||||
action: "load",
|
||||
id: coValueEntry.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.warn(`Failed to push load message to peer ${peer.id}`, {
|
||||
err,
|
||||
});
|
||||
});
|
||||
}
|
||||
internalMarkMagicallyAvailable(coValue: CoValueCore) {
|
||||
const previousState = this.highLevelState;
|
||||
this.core = coValue;
|
||||
this.updateCounter(previousState);
|
||||
this.notifyListeners();
|
||||
}
|
||||
|
||||
if (coValueEntry.state.type === "loading") {
|
||||
const { promise, resolve } = createResolvablePromise<void>();
|
||||
markErrored(peerId: PeerID, error: TryAddTransactionsError) {
|
||||
const previousState = this.highLevelState;
|
||||
this.peers.set(peerId, { type: "errored", error });
|
||||
this.updateCounter(previousState);
|
||||
this.notifyListeners();
|
||||
}
|
||||
|
||||
/**
|
||||
* Use a very long timeout for storage peers, because under pressure
|
||||
* they may take a long time to consume the messages queue
|
||||
*
|
||||
* TODO: Track errors on storage and do not rely on timeout
|
||||
*/
|
||||
const timeoutDuration =
|
||||
peer.role === "storage"
|
||||
? CO_VALUE_LOADING_CONFIG.TIMEOUT * 10
|
||||
: CO_VALUE_LOADING_CONFIG.TIMEOUT;
|
||||
|
||||
const handleTimeoutOrClose = () => {
|
||||
if (coValueEntry.state.type === "loading") {
|
||||
logger.warn("Failed to load coValue from peer", {
|
||||
coValueId: coValueEntry.id,
|
||||
peerId: peer.id,
|
||||
peerRole: peer.role,
|
||||
});
|
||||
coValueEntry.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: peer.id,
|
||||
});
|
||||
resolve();
|
||||
}
|
||||
};
|
||||
|
||||
const timeout = setTimeout(handleTimeoutOrClose, timeoutDuration);
|
||||
const closeListener = peer.addCloseListener(handleTimeoutOrClose);
|
||||
|
||||
await Promise.race([promise, coValueEntry.state.waitForPeer(peer.id)]);
|
||||
clearTimeout(timeout);
|
||||
closeListener();
|
||||
}
|
||||
private markPending(peerId: PeerID) {
|
||||
const previousState = this.highLevelState;
|
||||
this.peers.set(peerId, { type: "pending" });
|
||||
this.updateCounter(previousState);
|
||||
this.notifyListeners();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -400,29 +301,6 @@ async function runWithRetry<T>(fn: () => Promise<T>, maxRetries: number) {
|
||||
}
|
||||
}
|
||||
|
||||
function createResolvablePromise<T>() {
|
||||
let resolve!: (value: T) => void;
|
||||
|
||||
const promise = new Promise<T>((res) => {
|
||||
resolve = res;
|
||||
});
|
||||
|
||||
return { promise, resolve };
|
||||
}
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
function getPeersWithoutErrors(peers: PeerState[], coValueId: RawCoID) {
|
||||
return peers.filter((p) => {
|
||||
if (p.erroredCoValues.has(coValueId)) {
|
||||
logger.warn(
|
||||
`Skipping load on errored coValue ${coValueId} from peer ${p.id}`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -186,8 +186,8 @@ export class RawGroup<
|
||||
const child = store.get(id);
|
||||
|
||||
if (
|
||||
child.state.type === "unknown" ||
|
||||
child.state.type === "unavailable"
|
||||
child.highLevelState === "unknown" ||
|
||||
child.highLevelState === "unavailable"
|
||||
) {
|
||||
child.loadFromPeers(peers).catch(() => {
|
||||
logger.error(`Failed to load child group ${id}`);
|
||||
|
||||
@@ -79,8 +79,6 @@ type Value = JsonValue | AnyRawCoValue;
|
||||
import { CO_VALUE_LOADING_CONFIG } from "./coValueState.js";
|
||||
import { logger } from "./logger.js";
|
||||
import { getPriorityFromHeader } from "./priority.js";
|
||||
import { FileSystem } from "./storage/FileSystem.js";
|
||||
import { BlockFilename, LSMStorage, WalFilename } from "./storage/index.js";
|
||||
|
||||
/** @hidden */
|
||||
export const cojsonInternals = {
|
||||
@@ -143,7 +141,6 @@ export {
|
||||
CryptoProvider,
|
||||
SyncMessage,
|
||||
isRawCoID,
|
||||
LSMStorage,
|
||||
emptyKnownState,
|
||||
RawCoPlainText,
|
||||
stringifyOpID,
|
||||
@@ -154,9 +151,6 @@ export {
|
||||
|
||||
export type {
|
||||
Value,
|
||||
FileSystem,
|
||||
BlockFilename,
|
||||
WalFilename,
|
||||
IncomingSyncStream,
|
||||
OutgoingSyncQueue,
|
||||
DisconnectedError,
|
||||
|
||||
@@ -126,7 +126,7 @@ export class LocalNode {
|
||||
);
|
||||
|
||||
nodeWithAccount.account = controlledAccount;
|
||||
nodeWithAccount.coValuesStore.setAsAvailable(
|
||||
nodeWithAccount.coValuesStore.internalMarkMagicallyAvailable(
|
||||
controlledAccount.id,
|
||||
controlledAccount.core,
|
||||
);
|
||||
@@ -139,10 +139,8 @@ export class LocalNode {
|
||||
// we shouldn't need this, but it fixes account data not syncing for new accounts
|
||||
function syncAllCoValuesAfterCreateAccount() {
|
||||
for (const coValueEntry of nodeWithAccount.coValuesStore.getValues()) {
|
||||
if (coValueEntry.state.type === "available") {
|
||||
void nodeWithAccount.syncManager.syncCoValue(
|
||||
coValueEntry.state.coValue,
|
||||
);
|
||||
if (coValueEntry.isAvailable()) {
|
||||
void nodeWithAccount.syncManager.syncCoValue(coValueEntry.core);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -208,7 +206,10 @@ export class LocalNode {
|
||||
node.syncManager.local = node;
|
||||
|
||||
controlledAccount.core.node = node;
|
||||
node.coValuesStore.setAsAvailable(accountID, controlledAccount.core);
|
||||
node.coValuesStore.internalMarkMagicallyAvailable(
|
||||
accountID,
|
||||
controlledAccount.core,
|
||||
);
|
||||
controlledAccount.core._cachedContent = undefined;
|
||||
|
||||
const profileID = account.get("profile");
|
||||
@@ -245,7 +246,7 @@ export class LocalNode {
|
||||
}
|
||||
|
||||
const coValue = new CoValueCore(header, this);
|
||||
this.coValuesStore.setAsAvailable(coValue.id, coValue);
|
||||
this.coValuesStore.internalMarkMagicallyAvailable(coValue.id, coValue);
|
||||
|
||||
void this.syncManager.syncCoValue(coValue);
|
||||
|
||||
@@ -265,10 +266,17 @@ export class LocalNode {
|
||||
|
||||
const entry = this.coValuesStore.get(id);
|
||||
|
||||
if (entry.state.type === "unknown" || entry.state.type === "unavailable") {
|
||||
if (
|
||||
entry.highLevelState === "unknown" ||
|
||||
entry.highLevelState === "unavailable"
|
||||
) {
|
||||
const peers =
|
||||
this.syncManager.getServerAndStoragePeers(skipLoadingFromPeer);
|
||||
|
||||
if (peers.length === 0) {
|
||||
return "unavailable";
|
||||
}
|
||||
|
||||
await entry.loadFromPeers(peers).catch((e) => {
|
||||
logger.error("Error loading from peers", {
|
||||
id,
|
||||
@@ -309,8 +317,8 @@ export class LocalNode {
|
||||
getLoaded<T extends RawCoValue>(id: CoID<T>): T | undefined {
|
||||
const entry = this.coValuesStore.get(id);
|
||||
|
||||
if (entry.state.type === "available") {
|
||||
return entry.state.coValue.getCurrentContent() as T;
|
||||
if (entry.isAvailable()) {
|
||||
return entry.core.getCurrentContent() as T;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
@@ -439,12 +447,12 @@ export class LocalNode {
|
||||
expectCoValueLoaded(id: RawCoID, expectation?: string): CoValueCore {
|
||||
const entry = this.coValuesStore.get(id);
|
||||
|
||||
if (entry.state.type !== "available") {
|
||||
if (!entry.isAvailable()) {
|
||||
throw new Error(
|
||||
`${expectation ? expectation + ": " : ""}CoValue ${id} not yet loaded. Current state: ${entry.state.type}`,
|
||||
`${expectation ? expectation + ": " : ""}CoValue ${id} not yet loaded. Current state: ${JSON.stringify(entry)}`,
|
||||
);
|
||||
}
|
||||
return entry.state.coValue;
|
||||
return entry.core;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
@@ -638,15 +646,13 @@ export class LocalNode {
|
||||
while (coValuesToCopy.length > 0) {
|
||||
const [coValueID, entry] = coValuesToCopy[coValuesToCopy.length - 1]!;
|
||||
|
||||
if (entry.state.type !== "available") {
|
||||
if (!entry.isAvailable()) {
|
||||
coValuesToCopy.pop();
|
||||
continue;
|
||||
} else {
|
||||
const allDepsCopied = entry.state.coValue
|
||||
const allDepsCopied = entry.core
|
||||
.getDependedOnCoValues()
|
||||
.every(
|
||||
(dep) => newNode.coValuesStore.get(dep).state.type === "available",
|
||||
);
|
||||
.every((dep) => newNode.coValuesStore.get(dep).isAvailable());
|
||||
|
||||
if (!allDepsCopied) {
|
||||
// move to end of queue
|
||||
@@ -655,12 +661,15 @@ export class LocalNode {
|
||||
}
|
||||
|
||||
const newCoValue = new CoValueCore(
|
||||
entry.state.coValue.header,
|
||||
entry.core.header,
|
||||
newNode,
|
||||
new Map(entry.state.coValue.sessionLogs),
|
||||
new Map(entry.core.sessionLogs),
|
||||
);
|
||||
|
||||
newNode.coValuesStore.setAsAvailable(coValueID, newCoValue);
|
||||
newNode.coValuesStore.internalMarkMagicallyAvailable(
|
||||
coValueID,
|
||||
newCoValue,
|
||||
);
|
||||
|
||||
coValuesToCopy.pop();
|
||||
}
|
||||
|
||||
@@ -1,113 +0,0 @@
|
||||
import { CryptoProvider, StreamingHash } from "../crypto/crypto.js";
|
||||
import { RawCoID } from "../ids.js";
|
||||
import { CoValueChunk } from "./index.js";
|
||||
|
||||
export type BlockFilename = `L${number}-${string}-${string}-H${number}.jsonl`;
|
||||
|
||||
export type BlockHeader = { id: RawCoID; start: number; length: number }[];
|
||||
|
||||
export type WalEntry = { id: RawCoID } & CoValueChunk;
|
||||
|
||||
export type WalFilename = `wal-${number}.jsonl`;
|
||||
|
||||
export interface FileSystem<WriteHandle, ReadHandle> {
|
||||
crypto: CryptoProvider;
|
||||
createFile(filename: string): Promise<WriteHandle>;
|
||||
append(handle: WriteHandle, data: Uint8Array): Promise<void>;
|
||||
close(handle: ReadHandle | WriteHandle): Promise<void>;
|
||||
closeAndRename(handle: WriteHandle, filename: BlockFilename): Promise<void>;
|
||||
openToRead(filename: string): Promise<{ handle: ReadHandle; size: number }>;
|
||||
read(handle: ReadHandle, offset: number, length: number): Promise<Uint8Array>;
|
||||
listFiles(): Promise<string[]>;
|
||||
removeFile(filename: BlockFilename | WalFilename): Promise<void>;
|
||||
}
|
||||
|
||||
export const textEncoder = new TextEncoder();
|
||||
export const textDecoder = new TextDecoder();
|
||||
|
||||
export async function readChunk<RH, FS extends FileSystem<unknown, RH>>(
|
||||
handle: RH,
|
||||
header: { start: number; length: number },
|
||||
fs: FS,
|
||||
): Promise<CoValueChunk> {
|
||||
const chunkBytes = await fs.read(handle, header.start, header.length);
|
||||
|
||||
const chunk = JSON.parse(textDecoder.decode(chunkBytes));
|
||||
return chunk;
|
||||
}
|
||||
|
||||
export async function readHeader<RH, FS extends FileSystem<unknown, RH>>(
|
||||
filename: string,
|
||||
handle: RH,
|
||||
size: number,
|
||||
fs: FS,
|
||||
): Promise<BlockHeader> {
|
||||
const headerLength = Number(filename.match(/-H(\d+)\.jsonl$/)![1]!);
|
||||
|
||||
const headerBytes = await fs.read(handle, size - headerLength, headerLength);
|
||||
|
||||
const header = JSON.parse(textDecoder.decode(headerBytes));
|
||||
return header;
|
||||
}
|
||||
|
||||
export async function writeBlock<WH, RH, FS extends FileSystem<WH, RH>>(
|
||||
chunks: Map<RawCoID, CoValueChunk>,
|
||||
level: number,
|
||||
blockNumber: number,
|
||||
fs: FS,
|
||||
): Promise<BlockFilename> {
|
||||
if (chunks.size === 0) {
|
||||
throw new Error("No chunks to write");
|
||||
}
|
||||
|
||||
const blockHeader: BlockHeader = [];
|
||||
|
||||
let offset = 0;
|
||||
|
||||
const file = await fs.createFile(
|
||||
"wipBlock" + Math.random().toString(36).substring(7) + ".tmp.jsonl",
|
||||
);
|
||||
const hash = new StreamingHash(fs.crypto);
|
||||
|
||||
const chunksSortedById = Array.from(chunks).sort(([id1], [id2]) =>
|
||||
id1.localeCompare(id2),
|
||||
);
|
||||
|
||||
for (const [id, chunk] of chunksSortedById) {
|
||||
const encodedBytes = hash.update(chunk);
|
||||
const encodedBytesWithNewline = new Uint8Array(encodedBytes.length + 1);
|
||||
encodedBytesWithNewline.set(encodedBytes);
|
||||
encodedBytesWithNewline[encodedBytes.length] = 10;
|
||||
await fs.append(file, encodedBytesWithNewline);
|
||||
const length = encodedBytesWithNewline.length;
|
||||
blockHeader.push({ id, start: offset, length });
|
||||
offset += length;
|
||||
}
|
||||
|
||||
const headerBytes = textEncoder.encode(JSON.stringify(blockHeader));
|
||||
await fs.append(file, headerBytes);
|
||||
|
||||
const filename: BlockFilename = `L${level}-${(blockNumber + "").padStart(
|
||||
3,
|
||||
"0",
|
||||
)}-${hash.digest().replace("hash_", "").slice(0, 15)}-H${
|
||||
headerBytes.length
|
||||
}.jsonl`;
|
||||
await fs.closeAndRename(file, filename);
|
||||
|
||||
return filename;
|
||||
}
|
||||
|
||||
export async function writeToWal<WH, RH, FS extends FileSystem<WH, RH>>(
|
||||
handle: WH,
|
||||
fs: FS,
|
||||
id: RawCoID,
|
||||
chunk: CoValueChunk,
|
||||
) {
|
||||
const walEntry: WalEntry = {
|
||||
id,
|
||||
...chunk,
|
||||
};
|
||||
const bytes = textEncoder.encode(JSON.stringify(walEntry) + "\n");
|
||||
return fs.append(handle, bytes);
|
||||
}
|
||||
@@ -1,137 +0,0 @@
|
||||
import { MAX_RECOMMENDED_TX_SIZE } from "../coValueCore.js";
|
||||
import { RawCoID, SessionID } from "../ids.js";
|
||||
import { getPriorityFromHeader } from "../priority.js";
|
||||
import { CoValueKnownState, NewContentMessage } from "../sync.js";
|
||||
import { CoValueChunk } from "./index.js";
|
||||
|
||||
export function contentSinceChunk(
|
||||
id: RawCoID,
|
||||
chunk: CoValueChunk,
|
||||
known?: CoValueKnownState,
|
||||
): NewContentMessage[] {
|
||||
const newContentPieces: NewContentMessage[] = [];
|
||||
|
||||
newContentPieces.push({
|
||||
id: id,
|
||||
action: "content",
|
||||
header: known?.header ? undefined : chunk.header,
|
||||
new: {},
|
||||
priority: getPriorityFromHeader(chunk.header),
|
||||
});
|
||||
|
||||
for (const [sessionID, sessionsEntry] of Object.entries(
|
||||
chunk.sessionEntries,
|
||||
)) {
|
||||
for (const entry of sessionsEntry) {
|
||||
const knownStart = known?.sessions[sessionID as SessionID] || 0;
|
||||
|
||||
if (entry.after + entry.transactions.length <= knownStart) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const actuallyNewTransactions = entry.transactions.slice(
|
||||
Math.max(0, knownStart - entry.after),
|
||||
);
|
||||
|
||||
const newAfter =
|
||||
entry.after +
|
||||
(actuallyNewTransactions.length - entry.transactions.length);
|
||||
|
||||
let newContentEntry = newContentPieces[0]?.new[sessionID as SessionID];
|
||||
|
||||
if (!newContentEntry) {
|
||||
newContentEntry = {
|
||||
after: newAfter,
|
||||
lastSignature: entry.lastSignature,
|
||||
newTransactions: actuallyNewTransactions,
|
||||
};
|
||||
newContentPieces[0]!.new[sessionID as SessionID] = newContentEntry;
|
||||
} else {
|
||||
newContentEntry.newTransactions.push(...actuallyNewTransactions);
|
||||
newContentEntry.lastSignature = entry.lastSignature;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return newContentPieces;
|
||||
}
|
||||
|
||||
export function chunkToKnownState(id: RawCoID, chunk: CoValueChunk) {
|
||||
const ourKnown: CoValueKnownState = {
|
||||
id,
|
||||
header: !!chunk.header,
|
||||
sessions: {},
|
||||
};
|
||||
|
||||
for (const [sessionID, sessionEntries] of Object.entries(
|
||||
chunk.sessionEntries,
|
||||
)) {
|
||||
for (const entry of sessionEntries) {
|
||||
ourKnown.sessions[sessionID as SessionID] =
|
||||
entry.after + entry.transactions.length;
|
||||
}
|
||||
}
|
||||
return ourKnown;
|
||||
}
|
||||
|
||||
export function mergeChunks(
|
||||
chunkA: CoValueChunk,
|
||||
chunkB: CoValueChunk,
|
||||
): "nonContigous" | CoValueChunk {
|
||||
const header = chunkA.header || chunkB.header;
|
||||
|
||||
const newSessions = { ...chunkA.sessionEntries };
|
||||
for (const sessionID in chunkB.sessionEntries) {
|
||||
// figure out if we can merge the chunks
|
||||
const sessionEntriesA = chunkA.sessionEntries[sessionID];
|
||||
const sessionEntriesB = chunkB.sessionEntries[sessionID]!;
|
||||
|
||||
if (!sessionEntriesA) {
|
||||
newSessions[sessionID] = sessionEntriesB;
|
||||
continue;
|
||||
}
|
||||
|
||||
const lastEntryOfA = sessionEntriesA[sessionEntriesA.length - 1]!;
|
||||
const firstEntryOfB = sessionEntriesB[0]!;
|
||||
|
||||
if (
|
||||
lastEntryOfA.after + lastEntryOfA.transactions.length ===
|
||||
firstEntryOfB.after
|
||||
) {
|
||||
const newEntries = [];
|
||||
let bytesSinceLastSignature = 0;
|
||||
for (const entry of sessionEntriesA.concat(sessionEntriesB)) {
|
||||
const entryByteLength = entry.transactions.reduce(
|
||||
(sum, tx) =>
|
||||
sum +
|
||||
(tx.privacy === "private"
|
||||
? tx.encryptedChanges.length
|
||||
: tx.changes.length),
|
||||
0,
|
||||
);
|
||||
if (
|
||||
newEntries.length === 0 ||
|
||||
bytesSinceLastSignature + entryByteLength > MAX_RECOMMENDED_TX_SIZE
|
||||
) {
|
||||
newEntries.push({
|
||||
after: entry.after,
|
||||
lastSignature: entry.lastSignature,
|
||||
transactions: entry.transactions,
|
||||
});
|
||||
bytesSinceLastSignature = 0;
|
||||
} else {
|
||||
const lastNewEntry = newEntries[newEntries.length - 1]!;
|
||||
lastNewEntry.transactions.push(...entry.transactions);
|
||||
lastNewEntry.lastSignature = entry.lastSignature;
|
||||
|
||||
bytesSinceLastSignature += entry.transactions.length;
|
||||
}
|
||||
}
|
||||
newSessions[sessionID] = newEntries;
|
||||
} else {
|
||||
return "nonContigous" as const;
|
||||
}
|
||||
}
|
||||
|
||||
return { header, sessionEntries: newSessions };
|
||||
}
|
||||
@@ -1,531 +0,0 @@
|
||||
import { CoID, RawCoValue } from "../coValue.js";
|
||||
import { CoValueHeader, Transaction } from "../coValueCore.js";
|
||||
import { Signature } from "../crypto/crypto.js";
|
||||
import { RawCoID } from "../ids.js";
|
||||
import { logger } from "../logger.js";
|
||||
import { connectedPeers } from "../streamUtils.js";
|
||||
import {
|
||||
CoValueKnownState,
|
||||
IncomingSyncStream,
|
||||
NewContentMessage,
|
||||
OutgoingSyncQueue,
|
||||
Peer,
|
||||
} from "../sync.js";
|
||||
import {
|
||||
BlockFilename,
|
||||
FileSystem,
|
||||
WalEntry,
|
||||
WalFilename,
|
||||
readChunk,
|
||||
readHeader,
|
||||
textDecoder,
|
||||
writeBlock,
|
||||
writeToWal,
|
||||
} from "./FileSystem.js";
|
||||
import {
|
||||
chunkToKnownState,
|
||||
contentSinceChunk,
|
||||
mergeChunks,
|
||||
} from "./chunksAndKnownStates.js";
|
||||
export type { BlockFilename, WalFilename } from "./FileSystem.js";
|
||||
|
||||
const MAX_N_LEVELS = 3;
|
||||
|
||||
export type CoValueChunk = {
|
||||
header?: CoValueHeader;
|
||||
sessionEntries: {
|
||||
[sessionID: string]: {
|
||||
after: number;
|
||||
lastSignature: Signature;
|
||||
transactions: Transaction[];
|
||||
}[];
|
||||
};
|
||||
};
|
||||
|
||||
export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
|
||||
currentWal: WH | undefined;
|
||||
coValues: {
|
||||
[id: RawCoID]: CoValueChunk | undefined;
|
||||
};
|
||||
fileCache: string[] | undefined;
|
||||
headerCache = new Map<
|
||||
BlockFilename,
|
||||
{ [id: RawCoID]: { start: number; length: number } }
|
||||
>();
|
||||
blockFileHandles = new Map<
|
||||
BlockFilename,
|
||||
Promise<{ handle: RH; size: number }>
|
||||
>();
|
||||
|
||||
constructor(
|
||||
public fs: FS,
|
||||
public fromLocalNode: IncomingSyncStream,
|
||||
public toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
this.coValues = {};
|
||||
this.currentWal = undefined;
|
||||
|
||||
let nMsg = 0;
|
||||
|
||||
const processMessages = async () => {
|
||||
for await (const msg of fromLocalNode) {
|
||||
try {
|
||||
if (msg === "Disconnected" || msg === "PingTimeout") {
|
||||
throw new Error("Unexpected Disconnected message");
|
||||
}
|
||||
if (msg.action === "done") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (msg.action === "content") {
|
||||
await this.handleNewContent(msg);
|
||||
} else if (msg.action === "load" || msg.action === "known") {
|
||||
await this.sendNewContent(msg.id, msg, undefined);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error(`Error reading from localNode, handling msg`, {
|
||||
msg,
|
||||
err: e,
|
||||
});
|
||||
}
|
||||
nMsg++;
|
||||
}
|
||||
};
|
||||
|
||||
processMessages().catch((e) =>
|
||||
logger.error("Error in processMessages in storage", { err: e }),
|
||||
);
|
||||
|
||||
setTimeout(
|
||||
() =>
|
||||
this.compact().catch((e) => {
|
||||
logger.error("Error while compacting", { err: e });
|
||||
}),
|
||||
20000,
|
||||
);
|
||||
}
|
||||
|
||||
async sendNewContent(
|
||||
id: RawCoID,
|
||||
known: CoValueKnownState | undefined,
|
||||
asDependencyOf: RawCoID | undefined,
|
||||
) {
|
||||
let coValue = this.coValues[id];
|
||||
|
||||
if (!coValue) {
|
||||
coValue = await this.loadCoValue(id, this.fs);
|
||||
}
|
||||
|
||||
if (!coValue) {
|
||||
this.toLocalNode
|
||||
.push({
|
||||
id: id,
|
||||
action: "known",
|
||||
header: false,
|
||||
sessions: {},
|
||||
asDependencyOf,
|
||||
})
|
||||
.catch((e) => logger.error("Error while pushing known", { err: e }));
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
|
||||
await this.sendNewContent(
|
||||
coValue.header.ruleset.group,
|
||||
undefined,
|
||||
asDependencyOf || id,
|
||||
);
|
||||
} else if (!known?.header && coValue.header?.ruleset.type === "group") {
|
||||
const dependedOnAccountsAndGroups = new Set();
|
||||
for (const session of Object.values(coValue.sessionEntries)) {
|
||||
for (const entry of session) {
|
||||
for (const tx of entry.transactions) {
|
||||
if (tx.privacy === "trusting") {
|
||||
const parsedChanges = JSON.parse(tx.changes);
|
||||
for (const change of parsedChanges) {
|
||||
if (change.op === "set" && change.key.startsWith("co_")) {
|
||||
dependedOnAccountsAndGroups.add(change.key);
|
||||
}
|
||||
if (
|
||||
change.op === "set" &&
|
||||
change.key.startsWith("parent_co_")
|
||||
) {
|
||||
dependedOnAccountsAndGroups.add(
|
||||
change.key.replace("parent_", ""),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const accountOrGroup of dependedOnAccountsAndGroups) {
|
||||
await this.sendNewContent(
|
||||
accountOrGroup as CoID<RawCoValue>,
|
||||
undefined,
|
||||
asDependencyOf || id,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const newContentMessages = contentSinceChunk(id, coValue, known).map(
|
||||
(message) => ({ ...message, asDependencyOf }),
|
||||
);
|
||||
|
||||
const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);
|
||||
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
asDependencyOf,
|
||||
})
|
||||
.catch((e) => logger.error("Error while pushing known", { err: e }));
|
||||
|
||||
for (const message of newContentMessages) {
|
||||
if (Object.keys(message.new).length === 0) continue;
|
||||
this.toLocalNode
|
||||
.push(message)
|
||||
.catch((e) =>
|
||||
logger.error("Error while pushing new content", { err: e }),
|
||||
);
|
||||
}
|
||||
|
||||
this.coValues[id] = coValue;
|
||||
}
|
||||
|
||||
async withWAL(handler: (wal: WH) => Promise<void>) {
|
||||
if (!this.currentWal) {
|
||||
this.currentWal = await this.fs.createFile(
|
||||
`wal-${Date.now()}-${Math.random().toString(36).slice(2)}.jsonl`,
|
||||
);
|
||||
}
|
||||
await handler(this.currentWal);
|
||||
}
|
||||
|
||||
async handleNewContent(newContent: NewContentMessage) {
|
||||
const coValue = this.coValues[newContent.id];
|
||||
|
||||
const newContentAsChunk: CoValueChunk = {
|
||||
header: newContent.header,
|
||||
sessionEntries: Object.fromEntries(
|
||||
Object.entries(newContent.new).map(([sessionID, newInSession]) => [
|
||||
sessionID,
|
||||
[
|
||||
{
|
||||
after: newInSession.after,
|
||||
lastSignature: newInSession.lastSignature,
|
||||
transactions: newInSession.newTransactions,
|
||||
},
|
||||
],
|
||||
]),
|
||||
),
|
||||
};
|
||||
|
||||
if (!coValue) {
|
||||
if (newContent.header) {
|
||||
await this.withWAL((wal) =>
|
||||
writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
|
||||
);
|
||||
|
||||
this.coValues[newContent.id] = newContentAsChunk;
|
||||
} else {
|
||||
logger.warn("Incontiguous incoming update for " + newContent.id);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
const merged = mergeChunks(coValue, newContentAsChunk);
|
||||
if (merged === "nonContigous") {
|
||||
console.warn(
|
||||
"Non-contigous new content for " + newContent.id,
|
||||
Object.entries(coValue.sessionEntries).map(([session, entries]) =>
|
||||
entries.map((entry) => ({
|
||||
session: session,
|
||||
after: entry.after,
|
||||
length: entry.transactions.length,
|
||||
})),
|
||||
),
|
||||
Object.entries(newContentAsChunk.sessionEntries).map(
|
||||
([session, entries]) =>
|
||||
entries.map((entry) => ({
|
||||
session: session,
|
||||
after: entry.after,
|
||||
length: entry.transactions.length,
|
||||
})),
|
||||
),
|
||||
);
|
||||
} else {
|
||||
await this.withWAL((wal) =>
|
||||
writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
|
||||
);
|
||||
|
||||
this.coValues[newContent.id] = merged;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async getBlockHandle(
|
||||
blockFile: BlockFilename,
|
||||
fs: FS,
|
||||
): Promise<{ handle: RH; size: number }> {
|
||||
if (!this.blockFileHandles.has(blockFile)) {
|
||||
this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
|
||||
}
|
||||
|
||||
return this.blockFileHandles.get(blockFile)!;
|
||||
}
|
||||
|
||||
async loadCoValue(id: RawCoID, fs: FS): Promise<CoValueChunk | undefined> {
|
||||
const files = this.fileCache || (await fs.listFiles());
|
||||
this.fileCache = files;
|
||||
const blockFiles = (
|
||||
files.filter((name) => name.startsWith("L")) as BlockFilename[]
|
||||
).sort();
|
||||
|
||||
let result;
|
||||
|
||||
for (const blockFile of blockFiles) {
|
||||
let cachedHeader:
|
||||
| { [id: RawCoID]: { start: number; length: number } }
|
||||
| undefined = this.headerCache.get(blockFile);
|
||||
|
||||
const { handle, size } = await this.getBlockHandle(blockFile, fs);
|
||||
|
||||
if (!cachedHeader) {
|
||||
cachedHeader = {};
|
||||
const header = await readHeader(blockFile, handle, size, fs);
|
||||
for (const entry of header) {
|
||||
cachedHeader[entry.id] = {
|
||||
start: entry.start,
|
||||
length: entry.length,
|
||||
};
|
||||
}
|
||||
|
||||
this.headerCache.set(blockFile, cachedHeader);
|
||||
}
|
||||
const headerEntry = cachedHeader[id];
|
||||
|
||||
if (headerEntry) {
|
||||
const nextChunk = await readChunk(handle, headerEntry, fs);
|
||||
if (result) {
|
||||
const merged = mergeChunks(result, nextChunk);
|
||||
|
||||
if (merged === "nonContigous") {
|
||||
console.warn(
|
||||
"Non-contigous chunks while loading " + id,
|
||||
result,
|
||||
nextChunk,
|
||||
);
|
||||
} else {
|
||||
result = merged;
|
||||
}
|
||||
} else {
|
||||
result = nextChunk;
|
||||
}
|
||||
}
|
||||
|
||||
// await fs.close(handle);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async compact() {
|
||||
const fileNames = await this.fs.listFiles();
|
||||
|
||||
const walFiles = fileNames.filter((name) =>
|
||||
name.startsWith("wal-"),
|
||||
) as WalFilename[];
|
||||
walFiles.sort();
|
||||
|
||||
const coValues = new Map<RawCoID, CoValueChunk>();
|
||||
|
||||
if (walFiles.length === 0) return;
|
||||
|
||||
const oldWal = this.currentWal;
|
||||
this.currentWal = undefined;
|
||||
|
||||
if (oldWal) {
|
||||
await this.fs.close(oldWal);
|
||||
}
|
||||
|
||||
for (const fileName of walFiles) {
|
||||
const { handle, size }: { handle: RH; size: number } =
|
||||
await this.fs.openToRead(fileName);
|
||||
if (size === 0) {
|
||||
await this.fs.close(handle);
|
||||
continue;
|
||||
}
|
||||
const bytes = await this.fs.read(handle, 0, size);
|
||||
|
||||
const decoded = textDecoder.decode(bytes);
|
||||
const lines = decoded.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.length === 0) continue;
|
||||
const chunk = JSON.parse(line) as WalEntry;
|
||||
|
||||
const existingChunk = coValues.get(chunk.id);
|
||||
|
||||
if (existingChunk) {
|
||||
const merged = mergeChunks(existingChunk, chunk);
|
||||
if (merged === "nonContigous") {
|
||||
console.log(
|
||||
"Non-contigous chunks in " + chunk.id + ", " + fileName,
|
||||
existingChunk,
|
||||
chunk,
|
||||
);
|
||||
} else {
|
||||
coValues.set(chunk.id, merged);
|
||||
}
|
||||
} else {
|
||||
coValues.set(chunk.id, chunk);
|
||||
}
|
||||
}
|
||||
|
||||
await this.fs.close(handle);
|
||||
}
|
||||
|
||||
const highestBlockNumber = fileNames.reduce((acc, name) => {
|
||||
if (name.startsWith("L" + MAX_N_LEVELS)) {
|
||||
const num = parseInt(name.split("-")[1]!);
|
||||
if (num > acc) {
|
||||
return num;
|
||||
}
|
||||
}
|
||||
return acc;
|
||||
}, 0);
|
||||
|
||||
await writeBlock(coValues, MAX_N_LEVELS, highestBlockNumber + 1, this.fs);
|
||||
|
||||
for (const walFile of walFiles) {
|
||||
await this.fs.removeFile(walFile);
|
||||
}
|
||||
this.fileCache = undefined;
|
||||
|
||||
const fileNames2 = await this.fs.listFiles();
|
||||
|
||||
const blockFiles = (
|
||||
fileNames2.filter((name) => name.startsWith("L")) as BlockFilename[]
|
||||
).sort();
|
||||
|
||||
const blockFilesByLevelInOrder: {
|
||||
[level: number]: BlockFilename[];
|
||||
} = {};
|
||||
|
||||
for (const blockFile of blockFiles) {
|
||||
const level = parseInt(blockFile.split("-")[0]!.slice(1));
|
||||
if (!blockFilesByLevelInOrder[level]) {
|
||||
blockFilesByLevelInOrder[level] = [];
|
||||
}
|
||||
blockFilesByLevelInOrder[level]!.push(blockFile);
|
||||
}
|
||||
|
||||
for (let level = MAX_N_LEVELS; level > 0; level--) {
|
||||
const nBlocksDesired = Math.pow(2, level);
|
||||
const blocksInLevel = blockFilesByLevelInOrder[level];
|
||||
|
||||
if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
|
||||
const coValues = new Map<RawCoID, CoValueChunk>();
|
||||
|
||||
for (const blockFile of blocksInLevel) {
|
||||
const { handle, size }: { handle: RH; size: number } =
|
||||
await this.getBlockHandle(blockFile, this.fs);
|
||||
|
||||
if (size === 0) {
|
||||
continue;
|
||||
}
|
||||
const header = await readHeader(blockFile, handle, size, this.fs);
|
||||
for (const entry of header) {
|
||||
const chunk = await readChunk(handle, entry, this.fs);
|
||||
|
||||
const existingChunk = coValues.get(entry.id);
|
||||
|
||||
if (existingChunk) {
|
||||
const merged = mergeChunks(existingChunk, chunk);
|
||||
if (merged === "nonContigous") {
|
||||
console.log(
|
||||
"Non-contigous chunks in " + entry.id + ", " + blockFile,
|
||||
existingChunk,
|
||||
chunk,
|
||||
);
|
||||
} else {
|
||||
coValues.set(entry.id, merged);
|
||||
}
|
||||
} else {
|
||||
coValues.set(entry.id, chunk);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let levelBelow = blockFilesByLevelInOrder[level - 1];
|
||||
if (!levelBelow) {
|
||||
levelBelow = [];
|
||||
blockFilesByLevelInOrder[level - 1] = levelBelow;
|
||||
}
|
||||
|
||||
const highestBlockNumberInLevelBelow = levelBelow.reduce(
|
||||
(acc, name) => {
|
||||
const num = parseInt(name.split("-")[1]!);
|
||||
if (num > acc) {
|
||||
return num;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
0,
|
||||
);
|
||||
|
||||
const newBlockName = await writeBlock(
|
||||
coValues,
|
||||
level - 1,
|
||||
highestBlockNumberInLevelBelow + 1,
|
||||
this.fs,
|
||||
);
|
||||
levelBelow.push(newBlockName);
|
||||
|
||||
// delete blocks that went into this one
|
||||
for (const blockFile of blocksInLevel) {
|
||||
const handle = await this.getBlockHandle(blockFile, this.fs);
|
||||
await this.fs.close(handle.handle);
|
||||
await this.fs.removeFile(blockFile);
|
||||
this.blockFileHandles.delete(blockFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setTimeout(
|
||||
() =>
|
||||
this.compact().catch((e) => {
|
||||
logger.error("Error while compacting", { err: e });
|
||||
}),
|
||||
5000,
|
||||
);
|
||||
}
|
||||
|
||||
static asPeer<WH, RH, FS extends FileSystem<WH, RH>>({
|
||||
fs,
|
||||
trace,
|
||||
localNodeName = "local",
|
||||
}: {
|
||||
fs: FS;
|
||||
trace?: boolean;
|
||||
localNodeName?: string;
|
||||
}): Peer {
|
||||
const [localNodeAsPeer, storageAsPeer] = connectedPeers(
|
||||
localNodeName,
|
||||
"storage",
|
||||
{
|
||||
peer1role: "client",
|
||||
peer2role: "storage",
|
||||
trace,
|
||||
crashOnClose: true,
|
||||
},
|
||||
);
|
||||
|
||||
new LSMStorage(fs, localNodeAsPeer.incoming, localNodeAsPeer.outgoing);
|
||||
|
||||
// return { ...storageAsPeer, priority: 200 };
|
||||
return storageAsPeer;
|
||||
}
|
||||
}
|
||||
@@ -163,7 +163,7 @@ export class SyncManager {
  }

  async handleSyncMessage(msg: SyncMessage, peer: PeerState) {
    if (peer.erroredCoValues.has(msg.id)) {
    if (this.local.coValuesStore.get(msg.id).isErroredInPeer(peer.id)) {
      logger.warn(
        `Skipping message ${msg.action} on errored coValue ${msg.id} from peer ${peer.id}`,
      );
@@ -251,8 +251,8 @@ export class SyncManager {
      for (const id of coValue.getDependedOnCoValues()) {
        const entry = this.local.coValuesStore.get(id);

        if (entry.state.type === "available") {
          buildOrderedCoValueList(entry.state.coValue);
        if (entry.isAvailable()) {
          buildOrderedCoValueList(entry.core);
        }
      }

@@ -260,23 +260,20 @@ export class SyncManager {
    };

    for (const entry of this.local.coValuesStore.getValues()) {
      switch (entry.state.type) {
        case "unavailable":
          // If the coValue is unavailable and we never tried this peer
          // we try to load it from the peer
          if (!peer.toldKnownState.has(entry.id)) {
            await entry.loadFromPeers([peer]).catch((e: unknown) => {
              logger.error("Error sending load", { err: e });
            });
          }
          break;
        case "available":
          const coValue = entry.state.coValue;
      if (!entry.isAvailable()) {
        // If the coValue is unavailable and we never tried this peer
        // we try to load it from the peer
        if (!peer.toldKnownState.has(entry.id)) {
          await entry.loadFromPeers([peer]).catch((e: unknown) => {
            logger.error("Error sending load", { err: e });
          });
        }
      } else {
        const coValue = entry.core;

        // Build the list of coValues ordered by dependency
        // so we can send the load message in the correct order
        buildOrderedCoValueList(coValue);
          break;
          // Build the list of coValues ordered by dependency
          // so we can send the load message in the correct order
          buildOrderedCoValueList(coValue);
      }

      // Fill the missing known states with empty known states
@@ -399,7 +396,10 @@ export class SyncManager {
    peer.setKnownState(msg.id, knownStateIn(msg));
    const entry = this.local.coValuesStore.get(msg.id);

    if (entry.state.type === "unknown" || entry.state.type === "unavailable") {
    if (
      entry.highLevelState === "unknown" ||
      entry.highLevelState === "unavailable"
    ) {
      const eligiblePeers = this.getServerAndStoragePeers(peer.id);

      if (eligiblePeers.length === 0) {
@@ -426,7 +426,7 @@ export class SyncManager {
      }
    }

    if (entry.state.type === "loading") {
    if (entry.highLevelState === "loading") {
      // We need to return from handleLoad immediately and wait for the CoValue to be loaded
      // in a new task, otherwise we might block further incoming content messages that would
      // resolve the CoValue as available. This can happen when we receive fresh
@@ -456,7 +456,7 @@ export class SyncManager {
            err: e,
          });
        });
    } else if (entry.state.type === "available") {
    } else if (entry.isAvailable()) {
      await this.sendNewContentIncludingDependencies(msg.id, peer);
    } else {
      this.trySendToPeer(peer, {
@@ -475,20 +475,13 @@ export class SyncManager {

    // The header is a boolean value that tells us if the other peer do have information about the header.
    // If it's false in this point it means that the coValue is unavailable on the other peer.
    if (entry.state.type !== "available") {
      const availableOnPeer = peer.optimisticKnownStates.get(msg.id)?.header;
    const availableOnPeer = peer.optimisticKnownStates.get(msg.id)?.header;

      if (!availableOnPeer) {
        entry.dispatch({
          type: "not-found-in-peer",
          peerId: peer.id,
        });
      }

      return;
    if (!availableOnPeer) {
      entry.markNotFoundInPeer(peer.id);
    }

    if (entry.state.type === "available") {
    if (entry.isAvailable()) {
      await this.sendNewContentIncludingDependencies(msg.id, peer);
    }
  }
@@ -511,7 +504,7 @@ export class SyncManager {

    let coValue: CoValueCore;

    if (entry.state.type !== "available") {
    if (!entry.isAvailable()) {
      if (!msg.header) {
        this.trySendToPeer(peer, {
          action: "known",
@@ -533,12 +526,9 @@ export class SyncManager {

      coValue = new CoValueCore(msg.header, this.local);

      entry.dispatch({
        type: "available",
        coValue,
      });
      entry.markAvailable(coValue, peer.id);
    } else {
      coValue = entry.state.coValue;
      coValue = entry.core;
    }

    let invalidStateAssumed = false;
@@ -581,7 +571,7 @@ export class SyncManager {
            id: msg.id,
            err: result.error,
          });
          peer.erroredCoValues.set(msg.id, result.error);
          entry.markErrored(peer.id, result.error);
          continue;
        }

@@ -671,7 +661,8 @@ export class SyncManager {
  async actuallySyncCoValue(coValue: CoValueCore) {
    for (const peer of this.peersInPriorityOrder()) {
      if (peer.closed) continue;
      if (peer.erroredCoValues.has(coValue.id)) continue;
      if (this.local.coValuesStore.get(coValue.id).isErroredInPeer(peer.id))
        continue;

      if (peer.optimisticKnownStates.has(coValue.id)) {
        await this.sendNewContentIncludingDependencies(coValue.id, peer);
@@ -726,7 +717,8 @@ export class SyncManager {
    const coValues = this.local.coValuesStore.getValues();
    const validCoValues = Array.from(coValues).filter(
      (coValue) =>
        coValue.state.type === "available" || coValue.state.type === "loading",
        coValue.highLevelState === "available" ||
        coValue.highLevelState === "loading",
    );

    return Promise.all(

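The hunks above migrate SyncManager from inspecting `entry.state.type` directly to the new CoValueState accessors. A rough summary sketch of that surface, inferred only from the calls visible in this diff (not an authoritative declaration from the codebase):

```ts
// Inferred shape only: the accessors and mutators SyncManager now relies on,
// as suggested by the call sites in the hunks above.
interface CoValueStateLike {
  highLevelState: "unknown" | "loading" | "available" | "unavailable";
  isAvailable(): boolean;
  isErroredInPeer(peerId: string): boolean;
  core: CoValueCore; // only meaningful once isAvailable() is true
  markAvailable(coValue: CoValueCore, fromPeerId: string): void;
  markNotFoundInPeer(peerId: string): void;
  markErrored(peerId: string, error: unknown): void;
}
```
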
@@ -40,10 +40,10 @@ describe("CoValueState", () => {
|
||||
const mockCoValueId = "co_test123" as RawCoID;
|
||||
|
||||
test("should create unknown state", async () => {
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
|
||||
expect(state.id).toBe(mockCoValueId);
|
||||
expect(state.state.type).toBe("unknown");
|
||||
expect(state.highLevelState).toBe("unknown");
|
||||
expect(
|
||||
await metricReader.getMetricValue("jazz.covalues.loaded", {
|
||||
state: "unknown",
|
||||
@@ -52,11 +52,14 @@ describe("CoValueState", () => {
|
||||
});
|
||||
|
||||
test("should create loading state", async () => {
|
||||
const peerIds = ["peer1", "peer2"];
|
||||
const state = CoValueState.Loading(mockCoValueId, peerIds);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
state.loadFromPeers([
|
||||
createMockPeerState({ id: "peer1", role: "server" }),
|
||||
createMockPeerState({ id: "peer2", role: "server" }),
|
||||
]);
|
||||
|
||||
expect(state.id).toBe(mockCoValueId);
|
||||
expect(state.state.type).toBe("loading");
|
||||
expect(state.highLevelState).toBe("loading");
|
||||
expect(
|
||||
await metricReader.getMetricValue("jazz.covalues.loaded", {
|
||||
state: "loading",
|
||||
@@ -66,11 +69,12 @@ describe("CoValueState", () => {
|
||||
|
||||
test("should create available state", async () => {
|
||||
const mockCoValue = createMockCoValueCore(mockCoValueId);
|
||||
const state = CoValueState.Available(mockCoValue);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
state.internalMarkMagicallyAvailable(mockCoValue);
|
||||
|
||||
expect(state.id).toBe(mockCoValueId);
|
||||
assert(state.state.type === "available");
|
||||
expect(state.state.coValue).toBe(mockCoValue);
|
||||
expect(state.highLevelState).toBe("available");
|
||||
expect(state.core).toBe(mockCoValue);
|
||||
await expect(state.getCoValue()).resolves.toEqual(mockCoValue);
|
||||
expect(
|
||||
await metricReader.getMetricValue("jazz.covalues.loaded", {
|
||||
@@ -81,7 +85,11 @@ describe("CoValueState", () => {
|
||||
|
||||
test("should handle found action", async () => {
|
||||
const mockCoValue = createMockCoValueCore(mockCoValueId);
|
||||
const state = CoValueState.Loading(mockCoValueId, ["peer1", "peer2"]);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
state.loadFromPeers([
|
||||
createMockPeerState({ id: "peer1", role: "server" }),
|
||||
createMockPeerState({ id: "peer2", role: "server" }),
|
||||
]);
|
||||
|
||||
expect(
|
||||
await metricReader.getMetricValue("jazz.covalues.loaded", {
|
||||
@@ -96,10 +104,7 @@ describe("CoValueState", () => {
|
||||
|
||||
const stateValuePromise = state.getCoValue();
|
||||
|
||||
state.dispatch({
|
||||
type: "available",
|
||||
coValue: mockCoValue,
|
||||
});
|
||||
state.internalMarkMagicallyAvailable(mockCoValue);
|
||||
|
||||
const result = await state.getCoValue();
|
||||
expect(result).toBe(mockCoValue);
|
||||
@@ -117,17 +122,6 @@ describe("CoValueState", () => {
|
||||
).toBe(0);
|
||||
});
|
||||
|
||||
test("should ignore actions when not in loading state", () => {
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer1",
|
||||
});
|
||||
|
||||
expect(state.state.type).toBe("unknown");
|
||||
});
|
||||
|
||||
test("should retry loading from peers when unsuccessful", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
@@ -137,10 +131,7 @@ describe("CoValueState", () => {
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer1",
|
||||
});
|
||||
state.markNotFoundInPeer("peer1");
|
||||
},
|
||||
);
|
||||
const peer2 = createMockPeerState(
|
||||
@@ -149,15 +140,12 @@ describe("CoValueState", () => {
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer2",
|
||||
});
|
||||
state.markNotFoundInPeer("peer2");
|
||||
},
|
||||
);
|
||||
const mockPeers = [peer1, peer2] as unknown as PeerState[];
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers(mockPeers);
|
||||
|
||||
// Should attempt CO_VALUE_LOADING_CONFIG.MAX_RETRIES retries
|
||||
@@ -173,7 +161,7 @@ describe("CoValueState", () => {
|
||||
expect(peer2.pushOutgoingMessage).toHaveBeenCalledTimes(
|
||||
CO_VALUE_LOADING_CONFIG.MAX_RETRIES,
|
||||
);
|
||||
expect(state.state.type).toBe("unavailable");
|
||||
expect(state.highLevelState).toBe("unavailable");
|
||||
await expect(state.getCoValue()).resolves.toBe("unavailable");
|
||||
|
||||
vi.useRealTimers();
|
||||
@@ -188,11 +176,7 @@ describe("CoValueState", () => {
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
peer1.erroredCoValues.set(mockCoValueId, new Error("test") as any);
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer1",
|
||||
});
|
||||
state.markErrored("peer1", {} as any);
|
||||
},
|
||||
);
|
||||
const peer2 = createMockPeerState(
|
||||
@@ -201,16 +185,13 @@ describe("CoValueState", () => {
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer2",
|
||||
});
|
||||
state.markNotFoundInPeer("peer2");
|
||||
},
|
||||
);
|
||||
|
||||
const mockPeers = [peer1, peer2] as unknown as PeerState[];
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers(mockPeers);
|
||||
|
||||
// Should attempt CO_VALUE_LOADING_CONFIG.MAX_RETRIES retries
|
||||
@@ -224,7 +205,7 @@ describe("CoValueState", () => {
|
||||
expect(peer2.pushOutgoingMessage).toHaveBeenCalledTimes(
|
||||
CO_VALUE_LOADING_CONFIG.MAX_RETRIES,
|
||||
);
|
||||
expect(state.state.type).toBe("unavailable");
|
||||
expect(state.highLevelState).toBe("unavailable");
|
||||
await expect(state.getCoValue()).resolves.toBe("unavailable");
|
||||
|
||||
vi.useRealTimers();
|
||||
@@ -239,10 +220,7 @@ describe("CoValueState", () => {
|
||||
role: "storage",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer1",
|
||||
});
|
||||
state.markNotFoundInPeer("peer1");
|
||||
},
|
||||
);
|
||||
const peer2 = createMockPeerState(
|
||||
@@ -251,15 +229,12 @@ describe("CoValueState", () => {
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer2",
|
||||
});
|
||||
state.markNotFoundInPeer("peer2");
|
||||
},
|
||||
);
|
||||
const mockPeers = [peer1, peer2] as unknown as PeerState[];
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers(mockPeers);
|
||||
|
||||
// Should attempt CO_VALUE_LOADING_CONFIG.MAX_RETRIES retries
|
||||
@@ -273,7 +248,7 @@ describe("CoValueState", () => {
|
||||
expect(peer2.pushOutgoingMessage).toHaveBeenCalledTimes(
|
||||
CO_VALUE_LOADING_CONFIG.MAX_RETRIES,
|
||||
);
|
||||
expect(state.state.type).toBe("unavailable");
|
||||
expect(state.highLevelState).toBe("unavailable");
|
||||
await expect(state.getCoValue()).resolves.toEqual("unavailable");
|
||||
|
||||
vi.useRealTimers();
|
||||
@@ -293,17 +268,11 @@ describe("CoValueState", () => {
|
||||
},
|
||||
async () => {
|
||||
retries++;
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer1",
|
||||
});
|
||||
state.markNotFoundInPeer("peer1");
|
||||
|
||||
if (retries === 2) {
|
||||
setTimeout(() => {
|
||||
state.dispatch({
|
||||
type: "available",
|
||||
coValue: createMockCoValueCore(mockCoValueId),
|
||||
});
|
||||
state.markAvailable(createMockCoValueCore(mockCoValueId), "peer1");
|
||||
}, 100);
|
||||
}
|
||||
},
|
||||
@@ -311,7 +280,7 @@ describe("CoValueState", () => {
|
||||
|
||||
const mockPeers = [peer1] as unknown as PeerState[];
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers(mockPeers);
|
||||
|
||||
// Should attempt CO_VALUE_LOADING_CONFIG.MAX_RETRIES retries
|
||||
@@ -322,7 +291,7 @@ describe("CoValueState", () => {
|
||||
await loadPromise;
|
||||
|
||||
expect(peer1.pushOutgoingMessage).toHaveBeenCalledTimes(2);
|
||||
expect(state.state.type).toBe("available");
|
||||
expect(state.highLevelState).toBe("available");
|
||||
await expect(state.getCoValue()).resolves.toEqual({ id: mockCoValueId });
|
||||
vi.useRealTimers();
|
||||
});
|
||||
@@ -336,16 +305,13 @@ describe("CoValueState", () => {
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer1",
|
||||
});
|
||||
state.markNotFoundInPeer("peer1");
|
||||
},
|
||||
);
|
||||
|
||||
const mockPeers = [peer1] as unknown as PeerState[];
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers(mockPeers);
|
||||
|
||||
// Should attempt CO_VALUE_LOADING_CONFIG.MAX_RETRIES retries
|
||||
@@ -353,17 +319,14 @@ describe("CoValueState", () => {
|
||||
await vi.runAllTimersAsync();
|
||||
}
|
||||
|
||||
state.dispatch({
|
||||
type: "available",
|
||||
coValue: createMockCoValueCore(mockCoValueId),
|
||||
});
|
||||
state.internalMarkMagicallyAvailable(createMockCoValueCore(mockCoValueId));
|
||||
|
||||
await loadPromise;
|
||||
|
||||
expect(peer1.pushOutgoingMessage).toHaveBeenCalledTimes(
|
||||
CO_VALUE_LOADING_CONFIG.MAX_RETRIES,
|
||||
);
|
||||
expect(state.state.type).toBe("available");
|
||||
expect(state.highLevelState).toBe("available");
|
||||
await expect(state.getCoValue()).resolves.toEqual({ id: mockCoValueId });
|
||||
|
||||
vi.useRealTimers();
|
||||
@@ -383,22 +346,17 @@ describe("CoValueState", () => {
|
||||
},
|
||||
async () => {
|
||||
if (run > 2) {
|
||||
state.dispatch({
|
||||
type: "available",
|
||||
coValue: createMockCoValueCore(mockCoValueId),
|
||||
});
|
||||
state.markAvailable(createMockCoValueCore(mockCoValueId), "peer1");
|
||||
} else {
|
||||
state.markNotFoundInPeer("peer1");
|
||||
run++;
|
||||
}
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer1",
|
||||
});
|
||||
run++;
|
||||
},
|
||||
);
|
||||
|
||||
const mockPeers = [peer1] as unknown as PeerState[];
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers(mockPeers);
|
||||
|
||||
for (let i = 0; i < CO_VALUE_LOADING_CONFIG.MAX_RETRIES; i++) {
|
||||
@@ -407,7 +365,7 @@ describe("CoValueState", () => {
|
||||
await loadPromise;
|
||||
|
||||
expect(peer1.pushOutgoingMessage).toHaveBeenCalledTimes(3);
|
||||
expect(state.state.type).toBe("available");
|
||||
expect(state.highLevelState).toBe("available");
|
||||
await expect(state.getCoValue()).resolves.toEqual({ id: mockCoValueId });
|
||||
|
||||
vi.useRealTimers();
|
||||
@@ -424,26 +382,20 @@ describe("CoValueState", () => {
|
||||
role: "storage",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "available",
|
||||
coValue: mockCoValue,
|
||||
});
|
||||
state.markAvailable(mockCoValue, "peer1");
|
||||
},
|
||||
);
|
||||
const peer2 = createMockPeerState(
|
||||
{
|
||||
id: "peer1",
|
||||
id: "peer2",
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "not-found-in-peer",
|
||||
peerId: "peer2",
|
||||
});
|
||||
state.markNotFoundInPeer("peer2");
|
||||
},
|
||||
);
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers([peer1, peer2]);
|
||||
|
||||
for (let i = 0; i < CO_VALUE_LOADING_CONFIG.MAX_RETRIES; i++) {
|
||||
@@ -457,7 +409,7 @@ describe("CoValueState", () => {
|
||||
action: "load",
|
||||
...mockCoValue.knownState(),
|
||||
});
|
||||
expect(state.state.type).toBe("available");
|
||||
expect(state.highLevelState).toBe("available");
|
||||
await expect(state.getCoValue()).resolves.toEqual({ id: mockCoValueId });
|
||||
|
||||
vi.useRealTimers();
|
||||
@@ -479,20 +431,17 @@ describe("CoValueState", () => {
|
||||
);
|
||||
const peer2 = createMockPeerState(
|
||||
{
|
||||
id: "peer1",
|
||||
id: "peer2",
|
||||
role: "server",
|
||||
},
|
||||
async () => {
|
||||
state.dispatch({
|
||||
type: "available",
|
||||
coValue: mockCoValue,
|
||||
});
|
||||
state.markAvailable(mockCoValue, "peer2");
|
||||
},
|
||||
);
|
||||
|
||||
peer1.closed = true;
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers([peer1, peer2]);
|
||||
|
||||
for (let i = 0; i < CO_VALUE_LOADING_CONFIG.MAX_RETRIES; i++) {
|
||||
@@ -503,7 +452,7 @@ describe("CoValueState", () => {
|
||||
expect(peer1.pushOutgoingMessage).toHaveBeenCalledTimes(0);
|
||||
expect(peer2.pushOutgoingMessage).toHaveBeenCalledTimes(1);
|
||||
|
||||
expect(state.state.type).toBe("available");
|
||||
expect(state.highLevelState).toBe("available");
|
||||
await expect(state.getCoValue()).resolves.toEqual({ id: mockCoValueId });
|
||||
|
||||
vi.useRealTimers();
|
||||
@@ -520,7 +469,7 @@ describe("CoValueState", () => {
|
||||
async () => {},
|
||||
);
|
||||
|
||||
const state = CoValueState.Unknown(mockCoValueId);
|
||||
const state = new CoValueState(mockCoValueId);
|
||||
const loadPromise = state.loadFromPeers([peer1]);
|
||||
|
||||
for (let i = 0; i < CO_VALUE_LOADING_CONFIG.MAX_RETRIES * 2; i++) {
|
||||
@@ -532,7 +481,7 @@ describe("CoValueState", () => {
|
||||
CO_VALUE_LOADING_CONFIG.MAX_RETRIES,
|
||||
);
|
||||
|
||||
expect(state.state.type).toBe("unavailable");
|
||||
expect(state.highLevelState).toBe("unavailable");
|
||||
await expect(state.getCoValue()).resolves.toEqual("unavailable");
|
||||
|
||||
vi.useRealTimers();
|
||||
|
||||
@@ -492,8 +492,8 @@ describe("writeOnly", () => {
    );

    node2.node.coValuesStore.coValues.delete(map.id);
    expect(node2.node.coValuesStore.get(map.id)).toEqual(
      CoValueState.Unknown(map.id),
    expect(node2.node.coValuesStore.get(map.id)?.highLevelState).toBe(
      "unknown",
    );

    const mapOnNode2 = await loadCoValueOrFail(node2.node, map.id);

@@ -1,4 +1,4 @@
import { CoValueCore } from "../exports";
import { CoValueCore, LocalNode, RawControlledAccount } from "../exports";
import {
  CoValueKnownState,
  NewContentMessage,
@@ -63,6 +63,17 @@ export function toSimplifiedMessages(
  return messages.map((m) => toDebugString(m.from, m.to, m.msg));
}

export function nodeRelatedKnownCoValues(node: LocalNode, name: string) {
  const account = node.account as RawControlledAccount;
  const profileID = account.get("profile");
  const profile = profileID && node.expectCoValueLoaded(profileID);
  return {
    [`${name}Account`]: account,
    [`${name}Profile`]: profile,
    [`${name}ProfileGroup`]: profile?.getGroup().core,
  };
}

export function debugMessages(
  coValues: Record<string, CoValueCore>,
  messages: {

@@ -79,10 +79,10 @@ describe("peer reconciliation", () => {

    const mapOnSyncServer = jazzCloud.node.coValuesStore.get(map.id);

    assert(mapOnSyncServer.state.type === "available");
    assert(mapOnSyncServer.isAvailable());

    expect(
      expectMap(mapOnSyncServer.state.coValue.getCurrentContent()).get("hello"),
      expectMap(mapOnSyncServer.core.getCurrentContent()).get("hello"),
    ).toEqual("updated");

    expect(
@@ -126,10 +126,10 @@ describe("peer reconciliation", () => {

    const mapOnSyncServer = jazzCloud.node.coValuesStore.get(map.id);

    assert(mapOnSyncServer.state.type === "available");
    assert(mapOnSyncServer.isAvailable());

    expect(
      expectMap(mapOnSyncServer.state.coValue.getCurrentContent()).get("hello"),
      expectMap(mapOnSyncServer.core.getCurrentContent()).get("hello"),
    ).toEqual("updated");

    expect(peer.outgoing).toMatchObject({
@@ -180,7 +180,7 @@ describe("peer reconciliation", () => {
    await waitFor(() => {
      const mapOnSyncServer = jazzCloud.node.coValuesStore.get(map.id);

      expect(mapOnSyncServer.state.type).toBe("available");
      expect(mapOnSyncServer.highLevelState).toBe("available");
    });

    expect(
@@ -225,10 +225,10 @@ describe("peer reconciliation", () => {
    const mapOnSyncServer = jazzCloud.node.coValuesStore.get(map.id);

    await waitFor(() => {
      expect(mapOnSyncServer.state.type).toBe("available");
      expect(mapOnSyncServer.isAvailable()).toBe(true);
    });

    assert(mapOnSyncServer.state.type === "available");
    assert(mapOnSyncServer.isAvailable());

    expect(
      SyncMessagesLog.getMessages({
@@ -245,7 +245,7 @@ describe("peer reconciliation", () => {
    `);

    expect(
      expectMap(mapOnSyncServer.state.coValue.getCurrentContent()).get("hello"),
      expectMap(mapOnSyncServer.core.getCurrentContent()).get("hello"),
    ).toEqual("updated");
  });
});

@@ -1,23 +1,11 @@
import {
  assert,
  afterEach,
  beforeEach,
  describe,
  expect,
  test,
  vi,
} from "vitest";
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
import { expectMap } from "../coValue.js";
import type { CoValueHeader, TryAddTransactionsError } from "../coValueCore.js";
import type { RawAccountID } from "../coValues/account.js";
import { type MapOpPayload, RawCoMap } from "../coValues/coMap.js";
import { RawCoMap } from "../coValues/coMap.js";
import type { RawGroup } from "../coValues/group.js";
import { WasmCrypto } from "../crypto/WasmCrypto.js";
import { stableStringify } from "../jsonStringify.js";
import { LocalNode } from "../localNode.js";
import { getPriorityFromHeader } from "../priority.js";
import { connectedPeers, newQueuePair } from "../streamUtils.js";
import type { LoadMessage, SyncMessage } from "../sync.js";
import type { LoadMessage } from "../sync.js";
import {
  blockMessageTypeOnOutgoingPeer,
  connectTwoPeers,
@@ -759,9 +747,7 @@ describe("SyncManager.addPeer", () => {

    await map.core.waitForSync();

    expect(jazzCloud.node.coValuesStore.get(map.id).state.type).toBe(
      "available",
    );
    expect(jazzCloud.node.coValuesStore.get(map.id).isAvailable()).toBe(true);
  });
});

@@ -1083,14 +1069,13 @@ describe("SyncManager.handleSyncMessage", () => {
  test("should ignore messages for errored coValues", async () => {
    const client = await setupTestAccount();

    const { peerState } = client.connectToSyncServer();
    const { peer, peerState } = client.connectToSyncServer();

    // Add a coValue to the errored set
    const erroredId = "co_z123" as const;
    peerState.erroredCoValues.set(
      erroredId,
      new Error("Test error") as unknown as TryAddTransactionsError,
    );
    client.node.coValuesStore.get(erroredId).markErrored(peer.id, {
      message: "Test error",
    } as any);

    const message = {
      action: "load" as const,

@@ -1,5 +1,25 @@
# jazz-auth-clerk

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- jazz-browser@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [29e05c4]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- jazz-browser@0.13.12
- cojson@0.13.12

## 0.13.11

### Patch Changes

@@ -1,14 +1,14 @@
{
  "name": "jazz-auth-clerk",
  "version": "0.13.11",
  "version": "0.13.13",
  "type": "module",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "license": "MIT",
  "dependencies": {
    "cojson": "workspace:0.13.11",
    "jazz-browser": "workspace:0.13.11",
    "jazz-tools": "workspace:0.13.11"
    "cojson": "workspace:0.13.13",
    "jazz-browser": "workspace:0.13.13",
    "jazz-tools": "workspace:0.13.13"
  },
  "scripts": {
    "format-and-lint": "biome check .",

@@ -1,5 +1,21 @@
# jazz-browser-media-images

## 0.13.13

### Patch Changes

- jazz-browser@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [29e05c4]
- jazz-tools@0.13.12
- jazz-browser@0.13.12

## 0.13.11

### Patch Changes

@@ -1,6 +1,6 @@
{
  "name": "jazz-browser-media-images",
  "version": "0.13.11",
  "version": "0.13.13",
  "type": "module",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
@@ -8,8 +8,8 @@
  "dependencies": {
    "@types/image-blob-reduce": "^4.1.1",
    "image-blob-reduce": "^4.1.0",
    "jazz-browser": "workspace:0.13.11",
    "jazz-tools": "workspace:0.13.11",
    "jazz-browser": "workspace:0.13.13",
    "jazz-tools": "workspace:0.13.13",
    "pica": "^9.0.1"
  },
  "scripts": {

@@ -1,5 +1,27 @@
# jazz-browser

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- cojson-storage-indexeddb@0.13.13
- cojson-transport-ws@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- 29e05c4: Removed singleTabOPFS storage type
- Updated dependencies [4547525]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- cojson@0.13.12
- cojson-storage-indexeddb@0.13.12
- cojson-transport-ws@0.13.12

## 0.13.11

### Patch Changes

@@ -1,6 +1,6 @@
{
  "name": "jazz-browser",
  "version": "0.13.11",
  "version": "0.13.13",
  "type": "module",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",

@@ -1,348 +0,0 @@
|
||||
import { BlockFilename, CryptoProvider, FileSystem, WalFilename } from "cojson";
|
||||
|
||||
export class OPFSFilesystem
|
||||
implements
|
||||
FileSystem<
|
||||
{ id: number; filename: string },
|
||||
{ id: number; filename: string }
|
||||
>
|
||||
{
|
||||
opfsWorker: Worker;
|
||||
callbacks: Map<number, (event: MessageEvent) => void> = new Map();
|
||||
nextRequestId = 0;
|
||||
|
||||
constructor(public crypto: CryptoProvider) {
|
||||
this.opfsWorker = new Worker(
|
||||
URL.createObjectURL(
|
||||
new Blob([opfsWorkerJSSrc], { type: "text/javascript" }),
|
||||
),
|
||||
);
|
||||
this.opfsWorker.onmessage = (event) => {
|
||||
// console.log("Received from OPFS worker", event.data);
|
||||
const handler = this.callbacks.get(event.data.requestId);
|
||||
if (handler) {
|
||||
handler(event);
|
||||
this.callbacks.delete(event.data.requestId);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
listFiles(): Promise<string[]> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("listFiles" + requestId + "_listFiles");
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("listFilesEnd" + requestId + "_listFiles");
|
||||
performance.measure(
|
||||
"listFiles" + requestId + "_listFiles",
|
||||
"listFiles" + requestId + "_listFiles",
|
||||
"listFilesEnd" + requestId + "_listFiles",
|
||||
);
|
||||
resolve(event.data.fileNames);
|
||||
});
|
||||
this.opfsWorker.postMessage({ type: "listFiles", requestId });
|
||||
});
|
||||
}
|
||||
|
||||
openToRead(
|
||||
filename: string,
|
||||
): Promise<{ handle: { id: number; filename: string }; size: number }> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("openToRead" + "_" + filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
resolve({
|
||||
handle: { id: event.data.handle, filename },
|
||||
size: event.data.size,
|
||||
});
|
||||
performance.mark("openToReadEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"openToRead" + "_" + filename,
|
||||
"openToRead" + "_" + filename,
|
||||
"openToReadEnd" + "_" + filename,
|
||||
);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "openToRead",
|
||||
filename,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
createFile(filename: string): Promise<{ id: number; filename: string }> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("createFile" + "_" + filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("createFileEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"createFile" + "_" + filename,
|
||||
"createFile" + "_" + filename,
|
||||
"createFileEnd" + "_" + filename,
|
||||
);
|
||||
resolve({ id: event.data.handle, filename });
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "createFile",
|
||||
filename,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
openToWrite(filename: string): Promise<{ id: number; filename: string }> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("openToWrite" + "_" + filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("openToWriteEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"openToWrite" + "_" + filename,
|
||||
"openToWrite" + "_" + filename,
|
||||
"openToWriteEnd" + "_" + filename,
|
||||
);
|
||||
resolve({ id: event.data.handle, filename });
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "openToWrite",
|
||||
filename,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
append(
|
||||
handle: { id: number; filename: string },
|
||||
data: Uint8Array,
|
||||
): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("append" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, (_) => {
|
||||
performance.mark("appendEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"append" + "_" + handle.filename,
|
||||
"append" + "_" + handle.filename,
|
||||
"appendEnd" + "_" + handle.filename,
|
||||
);
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "append",
|
||||
handle: handle.id,
|
||||
data,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
read(
|
||||
handle: { id: number; filename: string },
|
||||
offset: number,
|
||||
length: number,
|
||||
): Promise<Uint8Array> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("read" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("readEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"read" + "_" + handle.filename,
|
||||
"read" + "_" + handle.filename,
|
||||
"readEnd" + "_" + handle.filename,
|
||||
);
|
||||
resolve(event.data.data);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "read",
|
||||
handle: handle.id,
|
||||
offset,
|
||||
length,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
close(handle: { id: number; filename: string }): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("close" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, (_) => {
|
||||
performance.mark("closeEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"close" + "_" + handle.filename,
|
||||
"close" + "_" + handle.filename,
|
||||
"closeEnd" + "_" + handle.filename,
|
||||
);
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "close",
|
||||
handle: handle.id,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
closeAndRename(
|
||||
handle: { id: number; filename: string },
|
||||
filename: BlockFilename,
|
||||
): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("closeAndRename" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, () => {
|
||||
performance.mark("closeAndRenameEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"closeAndRename" + "_" + handle.filename,
|
||||
"closeAndRename" + "_" + handle.filename,
|
||||
"closeAndRenameEnd" + "_" + handle.filename,
|
||||
);
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "closeAndRename",
|
||||
handle: handle.id,
|
||||
filename,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
removeFile(filename: BlockFilename | WalFilename): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("removeFile" + "_" + filename);
|
||||
this.callbacks.set(requestId, () => {
|
||||
performance.mark("removeFileEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"removeFile" + "_" + filename,
|
||||
"removeFile" + "_" + filename,
|
||||
"removeFileEnd" + "_" + filename,
|
||||
);
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "removeFile",
|
||||
filename,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const opfsWorkerJSSrc = `
|
||||
|
||||
let rootDirHandle;
|
||||
const handlesByRequest = new Map();
|
||||
const handlesByFilename = new Map();
|
||||
const filenamesForHandles = new Map();
|
||||
|
||||
onmessage = async function handleEvent(event) {
|
||||
rootDirHandle = rootDirHandle || await navigator.storage.getDirectory();
|
||||
// console.log("Received in OPFS worker", {...event.data, data: event.data.data ? "some data of length " + event.data.data.length : undefined});
|
||||
if (event.data.type === "listFiles") {
|
||||
const fileNames = [];
|
||||
for await (const entry of rootDirHandle.values()) {
|
||||
if (entry.kind === "file") {
|
||||
fileNames.push(entry.name);
|
||||
}
|
||||
}
|
||||
postMessage({requestId: event.data.requestId, fileNames});
|
||||
} else if (event.data.type === "openToRead" || event.data.type === "openToWrite") {
|
||||
let syncHandle;
|
||||
const existingHandle = handlesByFilename.get(event.data.filename);
|
||||
if (existingHandle) {
|
||||
throw new Error("Handle already exists for file: " + event.data.filename);
|
||||
} else {
|
||||
const handle = await rootDirHandle.getFileHandle(event.data.filename);
|
||||
try {
|
||||
syncHandle = await handle.createSyncAccessHandle();
|
||||
} catch (e) {
|
||||
throw new Error("Couldn't open file for reading: " + event.data.filename, {cause: e});
|
||||
}
|
||||
}
|
||||
handlesByRequest.set(event.data.requestId, syncHandle);
|
||||
handlesByFilename.set(event.data.filename, syncHandle);
|
||||
filenamesForHandles.set(syncHandle, event.data.filename);
|
||||
let size;
|
||||
try {
|
||||
size = syncHandle.getSize();
|
||||
} catch (e) {
|
||||
throw new Error("Couldn't get size of file: " + event.data.filename, {cause: e});
|
||||
}
|
||||
postMessage({requestId: event.data.requestId, handle: event.data.requestId, size});
|
||||
} else if (event.data.type === "createFile") {
|
||||
const handle = await rootDirHandle.getFileHandle(event.data.filename, {
|
||||
create: true,
|
||||
});
|
||||
let syncHandle;
|
||||
try {
|
||||
syncHandle = await handle.createSyncAccessHandle();
|
||||
} catch (e) {
|
||||
throw new Error("Couldn't create file: " + event.data.filename, {cause: e});
|
||||
}
|
||||
handlesByRequest.set(event.data.requestId, syncHandle);
|
||||
handlesByFilename.set(event.data.filename, syncHandle);
|
||||
filenamesForHandles.set(syncHandle, event.data.filename);
|
||||
postMessage({requestId: event.data.requestId, handle: event.data.requestId, result: "done"});
|
||||
} else if (event.data.type === "append") {
|
||||
const writable = handlesByRequest.get(event.data.handle);
|
||||
writable.write(event.data.data, {at: writable.getSize()});
|
||||
writable.flush();
|
||||
postMessage({requestId: event.data.requestId, result: "done"});
|
||||
} else if (event.data.type === "read") {
|
||||
const readable = handlesByRequest.get(event.data.handle);
|
||||
const buffer = new Uint8Array(event.data.length);
|
||||
const read = readable.read(buffer, {at: event.data.offset});
|
||||
if (read < event.data.length) {
|
||||
throw new Error("Couldn't read enough");
|
||||
}
|
||||
postMessage({requestId: event.data.requestId, data: buffer, result: "done"});
|
||||
} else if (event.data.type === "close") {
|
||||
const handle = handlesByRequest.get(event.data.handle);
|
||||
// console.log("Closing handle", filenamesForHandles.get(handle), event.data.handle, handle);
|
||||
handle.flush();
|
||||
handle.close();
|
||||
handlesByRequest.delete(handle);
|
||||
const filename = filenamesForHandles.get(handle);
|
||||
handlesByFilename.delete(filename);
|
||||
filenamesForHandles.delete(handle);
|
||||
postMessage({requestId: event.data.requestId, result: "done"});
|
||||
} else if (event.data.type === "closeAndRename") {
|
||||
const handle = handlesByRequest.get(event.data.handle);
|
||||
handle.flush();
|
||||
const buffer = new Uint8Array(handle.getSize());
|
||||
const read = handle.read(buffer, {at: 0});
|
||||
if (read < buffer.length) {
|
||||
throw new Error("Couldn't read enough " + read + ", " + handle.getSize());
|
||||
}
|
||||
handle.close();
|
||||
const oldFilename = filenamesForHandles.get(handle);
|
||||
await rootDirHandle.removeEntry(oldFilename);
|
||||
|
||||
const newHandle = await rootDirHandle.getFileHandle(event.data.filename, { create: true });
|
||||
let writable;
|
||||
try {
|
||||
writable = await newHandle.createSyncAccessHandle();
|
||||
} catch (e) {
|
||||
throw new Error("Couldn't create file (to rename to): " + event.data.filename, { cause: e })
|
||||
}
|
||||
writable.write(buffer);
|
||||
writable.close();
|
||||
postMessage({requestId: event.data.requestId, result: "done"});
|
||||
} else if (event.data.type === "removeFile") {
|
||||
try {
|
||||
await rootDirHandle.removeEntry(event.data.filename);
|
||||
} catch(e) {
|
||||
throw new Error("Couldn't remove file: " + event.data.filename, { cause: e });
|
||||
}
|
||||
postMessage({requestId: event.data.requestId, result: "done"});
|
||||
} else {
|
||||
console.error("Unknown event type", event.data.type);
|
||||
}
|
||||
};
|
||||
|
||||
//# sourceURL=opfsWorker.js
|
||||
`;
|
||||
@@ -1,4 +1,4 @@
|
||||
import { LSMStorage, LocalNode, Peer, RawAccountID } from "cojson";
|
||||
import { LocalNode, Peer, RawAccountID } from "cojson";
|
||||
import { IDBStorage } from "cojson-storage-indexeddb";
|
||||
import { WebSocketPeerWithReconnection } from "cojson-transport-ws";
|
||||
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
|
||||
@@ -19,7 +19,6 @@ import {
|
||||
createAnonymousJazzContext,
|
||||
} from "jazz-tools";
|
||||
import { createJazzContext } from "jazz-tools";
|
||||
import { OPFSFilesystem } from "./OPFSFilesystem.js";
|
||||
import { StorageConfig, getStorageOptions } from "./storageOptions.js";
|
||||
import { setupInspector } from "./utils/export-account-inspector.js";
|
||||
|
||||
@@ -50,19 +49,10 @@ async function setupPeers(options: BaseBrowserContextOptions) {
|
||||
const crypto = options.crypto || (await WasmCrypto.create());
|
||||
let node: LocalNode | undefined = undefined;
|
||||
|
||||
const { useSingleTabOPFS, useIndexedDB } = getStorageOptions(options.storage);
|
||||
const { useIndexedDB } = getStorageOptions(options.storage);
|
||||
|
||||
const peersToLoadFrom: Peer[] = [];
|
||||
|
||||
if (useSingleTabOPFS) {
|
||||
peersToLoadFrom.push(
|
||||
await LSMStorage.asPeer({
|
||||
fs: new OPFSFilesystem(crypto),
|
||||
// trace: true,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (useIndexedDB) {
|
||||
peersToLoadFrom.push(await IDBStorage.asPeer());
|
||||
}
|
||||
|
||||
@@ -1,23 +1,17 @@
type StorageOption = "indexedDB" | "singleTabOPFS";
type CombinedStorageOption = ["singleTabOPFS", "indexedDB"];
type StorageOption = "indexedDB";
type CombinedStorageOption = ["indexedDB"];
export type StorageConfig =
| StorageOption
| CombinedStorageOption
| [StorageOption];

export function getStorageOptions(storage?: StorageConfig): {
useSingleTabOPFS: boolean;
useIndexedDB: boolean;
} {
const useSingleTabOPFS =
(Array.isArray(storage) && storage.includes("singleTabOPFS")) ||
storage === "singleTabOPFS";

const useIndexedDB =
!storage ||
(Array.isArray(storage) && storage.includes("indexedDB")) ||
storage === "indexedDB" ||
!useSingleTabOPFS;
storage === "indexedDB";

return { useSingleTabOPFS, useIndexedDB };
return { useIndexedDB };
}

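With `singleTabOPFS` gone, the narrowed `StorageConfig` only admits `"indexedDB"`, `["indexedDB"]`, or `undefined`, so the simplified helper reports `useIndexedDB: true` for every value the type still allows. A small illustrative check of that behaviour follows; the relative import path simply mirrors the one used by the package's own tests below.

```ts
import { getStorageOptions } from "../storageOptions.js"; // same relative path as in the tests below

// Every remaining StorageConfig value enables IndexedDB storage:
console.log(getStorageOptions());              // { useIndexedDB: true } (default when storage is undefined)
console.log(getStorageOptions("indexedDB"));   // { useIndexedDB: true }
console.log(getStorageOptions(["indexedDB"])); // { useIndexedDB: true }
```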
@@ -1,12 +1,10 @@
import { describe, expect, test } from "vitest";
import { getStorageOptions } from "../storageOptions.js";
import { StorageConfig } from "../storageOptions.js";

describe("getStorageOptions", () => {
test("should default to indexedDB only when no storage option is provided", () => {
const result = getStorageOptions();
expect(result).toEqual({
useSingleTabOPFS: false,
useIndexedDB: true,
});
});
@@ -14,23 +12,6 @@ describe("getStorageOptions", () => {
test("should enable only indexedDB when 'indexedDB' is provided", () => {
const result = getStorageOptions("indexedDB");
expect(result).toEqual({
useSingleTabOPFS: false,
useIndexedDB: true,
});
});

test("should enable only singleTabOPFS when 'singleTabOPFS' is provided", () => {
const result = getStorageOptions("singleTabOPFS");
expect(result).toEqual({
useSingleTabOPFS: true,
useIndexedDB: false,
});
});

test("should enable both when array with both options is provided", () => {
const result = getStorageOptions(["singleTabOPFS", "indexedDB"]);
expect(result).toEqual({
useSingleTabOPFS: true,
useIndexedDB: true,
});
});
@@ -38,37 +19,15 @@ describe("getStorageOptions", () => {
test("should enable only indexedDB when array with only indexedDB is provided", () => {
const result = getStorageOptions(["indexedDB"]);
expect(result).toEqual({
useSingleTabOPFS: false,
useIndexedDB: true,
});
});

test("should enable only singleTabOPFS when array with only singleTabOPFS is provided", () => {
const result = getStorageOptions(["singleTabOPFS"]);
expect(result).toEqual({
useSingleTabOPFS: true,
useIndexedDB: false,
});
});

test("should fallback to indexedDB when singleTabOPFS is not available", () => {
// Testing the fallback behavior when storage is undefined
const result = getStorageOptions(undefined);
expect(result.useIndexedDB).toBe(true);

// Testing with an empty array (invalid case but should fallback safely)
const result2 = getStorageOptions([] as unknown as StorageConfig);
expect(result2.useIndexedDB).toBe(true);
});

// Type checking tests
test("should handle type checking for StorageConfig", () => {
// These should compile without type errors
getStorageOptions("indexedDB");
getStorageOptions("singleTabOPFS");
getStorageOptions(["singleTabOPFS", "indexedDB"]);
getStorageOptions(["indexedDB"]);
getStorageOptions(["singleTabOPFS"]);
getStorageOptions(undefined);
});
});

@@ -1,5 +1,30 @@
# jazz-browser

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- cojson-transport-ws@0.13.13
- jazz-auth-clerk@0.13.13
- jazz-react-core@0.13.13
- jazz-react-native-core@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- cojson@0.13.12
- jazz-auth-clerk@0.13.12
- jazz-react-core@0.13.12
- jazz-react-native-core@0.13.12
- cojson-transport-ws@0.13.12

## 0.13.11

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "jazz-expo",
"version": "0.13.11",
"version": "0.13.13",
"type": "module",
"main": "./dist/index.js",
"module": "./dist/index.js",

@@ -1,5 +1,24 @@
# jazz-inspector

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- jazz-react-core@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- cojson@0.13.12
- jazz-react-core@0.13.12

## 0.13.11

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "jazz-inspector",
"version": "0.13.11",
"version": "0.13.13",
"type": "module",
"main": "./dist/app.js",
"types": "./dist/app.d.ts",

@@ -1,5 +1,24 @@
# jazz-autosub

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- cojson-transport-ws@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- cojson@0.13.12
- cojson-transport-ws@0.13.12

## 0.13.11

### Patch Changes

@@ -5,7 +5,7 @@
"types": "dist/index.d.ts",
"type": "module",
"license": "MIT",
"version": "0.13.11",
"version": "0.13.13",
"dependencies": {
"cojson": "workspace:*",
"cojson-transport-ws": "workspace:*",

@@ -1,5 +1,29 @@
# jazz-browser-media-images

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- jazz-auth-clerk@0.13.13
- jazz-browser@0.13.13
- jazz-react@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [29e05c4]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- jazz-browser@0.13.12
- cojson@0.13.12
- jazz-auth-clerk@0.13.12
- jazz-react@0.13.12

## 0.13.11

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "jazz-react-auth-clerk",
"version": "0.13.11",
"version": "0.13.13",
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",

@@ -1,5 +1,22 @@
# jazz-react-core

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- cojson@0.13.12

## 0.13.11

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "jazz-react-core",
"version": "0.13.11",
"version": "0.13.13",
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",

@@ -1,5 +1,28 @@
# jazz-browser

## 0.13.13

### Patch Changes

- Updated dependencies [ec9cb40]
- cojson@0.13.13
- cojson-storage@0.13.13
- cojson-transport-ws@0.13.13
- jazz-react-core@0.13.13
- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- Updated dependencies [65719f2]
- jazz-tools@0.13.12
- cojson@0.13.12
- jazz-react-core@0.13.12
- cojson-storage@0.13.12
- cojson-transport-ws@0.13.12

## 0.13.11

### Patch Changes

@@ -1,7 +1,7 @@
{
"name": "jazz-react-native-core",
"type": "module",
"version": "0.13.11",
"version": "0.13.13",
"license": "MIT",
"main": "./dist/index.js",
"module": "./dist/index.js",

@@ -1,5 +1,18 @@
# jazz-browser-media-images

## 0.13.13

### Patch Changes

- jazz-tools@0.13.13

## 0.13.12

### Patch Changes

- Updated dependencies [4547525]
- jazz-tools@0.13.12

## 0.13.11

### Patch Changes

Some files were not shown because too many files have changed in this diff