Compare commits
106 Commits
cojson@0.7
...
cojson@0.7
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c14a0e05be | ||
|
|
016dd3a5dd | ||
|
|
5c4ca9103c | ||
|
|
b4aad92907 | ||
|
|
56d1e095a1 | ||
|
|
6dee9aae49 | ||
|
|
a10bff981e | ||
|
|
e333f7884a | ||
|
|
8ea7bf237b | ||
|
|
5e8409fa08 | ||
|
|
23354c1767 | ||
|
|
0efb69d0db | ||
|
|
0462c4e41b | ||
|
|
70a5673197 | ||
|
|
9ec3203485 | ||
|
|
1a46f9b2e1 | ||
|
|
77bb26a8d7 | ||
|
|
2a36dcf592 | ||
|
|
fc2bcadbe2 | ||
|
|
46b0cc1adb | ||
|
|
d75d1c6a3f | ||
|
|
13b236aeed | ||
|
|
1c0a61b0b2 | ||
|
|
ceb92438f4 | ||
|
|
9bdd62ed4c | ||
|
|
3f5ef7e799 | ||
|
|
e7a573fa94 | ||
|
|
364060eaa7 | ||
|
|
a3ddc3d5e0 | ||
|
|
185f747adb | ||
|
|
895d281088 | ||
|
|
b44e4354f7 | ||
|
|
3fcb0665ec | ||
|
|
be49d33ce5 | ||
|
|
c7dae1608b | ||
|
|
b020c5868b | ||
|
|
eae42d3afe | ||
|
|
a816e2436e | ||
|
|
b09e35e372 | ||
|
|
d2c8121c9c | ||
|
|
380bb88ffa | ||
|
|
e0e3726b3c | ||
|
|
c2253a7979 | ||
|
|
9d244226ec | ||
|
|
71df5e3a59 | ||
|
|
3a738dad88 | ||
|
|
56d301cfde | ||
|
|
5efec6d5ea | ||
|
|
32769b24f1 | ||
|
|
6ab53c263d | ||
|
|
e7f3e4e242 | ||
|
|
8bb5201647 | ||
|
|
a9fc94f53d | ||
|
|
ca7c0510d1 | ||
|
|
1bf16f0859 | ||
|
|
21b503c188 | ||
|
|
0053e9796c | ||
|
|
e84941b1b1 | ||
|
|
57f6f8d67e | ||
|
|
5b8e69d973 | ||
|
|
7213b1bfa3 | ||
|
|
11f0770f08 | ||
|
|
44e6dc3ae8 | ||
|
|
b5d20d2488 | ||
|
|
0185545838 | ||
|
|
8c8f85859c | ||
|
|
104384409e | ||
|
|
179827ae56 | ||
|
|
6645829876 | ||
|
|
68cb302722 | ||
|
|
8dc33f2790 | ||
|
|
5f64ba326c | ||
|
|
7ccb15107c | ||
|
|
b102964743 | ||
|
|
216d50a09c | ||
|
|
07ea59fdcb | ||
|
|
932a84a47f | ||
|
|
34dda7bdbd | ||
|
|
49fa153581 | ||
|
|
c80b827775 | ||
|
|
a2bf9f988a | ||
|
|
ac27b2d5c2 | ||
|
|
c813518fdc | ||
|
|
d5034ed5c3 | ||
|
|
cf2c29a365 | ||
|
|
d948823db6 | ||
|
|
060ad4630d | ||
|
|
0ddceac4c0 | ||
|
|
a862cb8819 | ||
|
|
4246aed7db | ||
|
|
41554e0e0b | ||
|
|
93c4d8155e | ||
|
|
24eefd49f1 | ||
|
|
e712f1e8ef | ||
|
|
33db0fd654 | ||
|
|
478ded93de | ||
|
|
89ad1fb79d | ||
|
|
1ba40806ec | ||
|
|
73ae281e4a | ||
|
|
a35353c987 | ||
|
|
1cb91003cc | ||
|
|
d850022491 | ||
|
|
93792ab6f6 | ||
|
|
95dfe7af6a | ||
|
|
734258eb17 | ||
|
|
f3bcf96fad |
@@ -12,7 +12,7 @@
|
||||
"jazz-react",
|
||||
"jazz-nodejs",
|
||||
"jazz-run",
|
||||
"cojson-transport-nodejs-ws",
|
||||
"cojson-transport-ws",
|
||||
"cojson-storage-indexeddb",
|
||||
"cojson-storage-sqlite"
|
||||
]
|
||||
|
||||
86
.github/workflows/build-and-deploy.yaml
vendored
86
.github/workflows/build-and-deploy.yaml
vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
example: ["chat", "pets", "todo"]
|
||||
example: ["chat", "pets", "todo", "inspector"]
|
||||
# example: ["twit", "chat", "counter-js-auth0", "pets", "twit", "file-drop", "inspector"]
|
||||
|
||||
steps:
|
||||
@@ -53,59 +53,12 @@ jobs:
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
build-homepage:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: true
|
||||
|
||||
- uses: pnpm/action-setup@v2
|
||||
with:
|
||||
version: 8
|
||||
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 16
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: gardencmp
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Pnpm Install (root)
|
||||
run: |
|
||||
pnpm install
|
||||
working-directory: .
|
||||
|
||||
- name: Pnpm Install & Build (homepage)
|
||||
run: |
|
||||
pnpm install
|
||||
pnpm build;
|
||||
working-directory: ./homepage/homepage
|
||||
|
||||
- name: Docker Build & Push
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: ./homepage
|
||||
push: true
|
||||
tags: ghcr.io/gardencmp/${{github.event.repository.name}}-homepage-jazz:${{github.head_ref || github.ref_name}}-${{github.sha}}-${{github.run_number}}-${{github.run_attempt}}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
deploy-examples:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-examples
|
||||
strategy:
|
||||
matrix:
|
||||
example: ["chat", "pets", "todo"]
|
||||
example: ["chat", "pets", "todo", "inspector"]
|
||||
# example: ["twit", "chat", "counter-js-auth0", "pets", "twit", "file-drop", "inspector"]
|
||||
|
||||
steps:
|
||||
@@ -135,37 +88,4 @@ jobs:
|
||||
envsubst '${DOCKER_USER} ${DOCKER_PASSWORD} ${DOCKER_TAG} ${BRANCH_SUFFIX} ${BRANCH_SUBDOMAIN}' < job-template.nomad > job-instance.nomad;
|
||||
cat job-instance.nomad;
|
||||
NOMAD_ADDR=${{ secrets.NOMAD_ADDR }} nomad job run job-instance.nomad;
|
||||
working-directory: ./examples/${{ matrix.example }}
|
||||
|
||||
deploy-homepage:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-homepage
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: true
|
||||
- uses: gacts/install-nomad@v1
|
||||
- name: Tailscale
|
||||
uses: tailscale/github-action@v1
|
||||
with:
|
||||
authkey: ${{ secrets.TAILSCALE_AUTHKEY }}
|
||||
|
||||
- name: Deploy on Nomad
|
||||
run: |
|
||||
if [ "${{github.ref_name}}" == "main" ]; then
|
||||
export BRANCH_SUFFIX="";
|
||||
export BRANCH_SUBDOMAIN="";
|
||||
else
|
||||
export BRANCH_SUFFIX=-${{github.head_ref || github.ref_name}};
|
||||
export BRANCH_SUBDOMAIN=${{github.head_ref || github.ref_name}}.;
|
||||
fi
|
||||
|
||||
export DOCKER_USER=gardencmp;
|
||||
export DOCKER_PASSWORD=${{ secrets.DOCKER_PULL_PAT }};
|
||||
export DOCKER_TAG=ghcr.io/gardencmp/${{github.event.repository.name}}-homepage-jazz:${{github.head_ref || github.ref_name}}-${{github.sha}}-${{github.run_number}}-${{github.run_attempt}};
|
||||
|
||||
envsubst '${DOCKER_USER} ${DOCKER_PASSWORD} ${DOCKER_TAG} ${BRANCH_SUFFIX} ${BRANCH_SUBDOMAIN}' < job-template.nomad > job-instance.nomad;
|
||||
cat job-instance.nomad;
|
||||
NOMAD_ADDR=${{ secrets.NOMAD_ADDR }} nomad job run job-instance.nomad;
|
||||
working-directory: ./homepage
|
||||
working-directory: ./examples/${{ matrix.example }}
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
. "$(dirname "$0")/_/husky.sh"
|
||||
@@ -1,5 +1,160 @@
|
||||
# jazz-example-chat
|
||||
|
||||
## 0.0.75
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.28
|
||||
- jazz-react@0.7.28
|
||||
- jazz-tools@0.7.28
|
||||
|
||||
## 0.0.74
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.27
|
||||
|
||||
## 0.0.73
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.26
|
||||
- jazz-react@0.7.26
|
||||
- jazz-tools@0.7.26
|
||||
|
||||
## 0.0.72
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.25
|
||||
- jazz-react@0.7.25
|
||||
|
||||
## 0.0.71
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.24
|
||||
- jazz-react@0.7.24
|
||||
|
||||
## 0.0.70
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.23
|
||||
- jazz-react@0.7.23
|
||||
- jazz-tools@0.7.23
|
||||
|
||||
## 0.0.69
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.22
|
||||
|
||||
## 0.0.68
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.21
|
||||
- jazz-react@0.7.21
|
||||
|
||||
## 0.0.67
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.20
|
||||
- jazz-react@0.7.20
|
||||
|
||||
## 0.0.66
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.19
|
||||
- jazz-react@0.7.19
|
||||
|
||||
## 0.0.65
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.18
|
||||
- jazz-react@0.7.18
|
||||
- jazz-tools@0.7.18
|
||||
|
||||
## 0.0.64
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.17
|
||||
- jazz-react@0.7.17
|
||||
- jazz-tools@0.7.17
|
||||
|
||||
## 0.0.63
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.16
|
||||
- jazz-react@0.7.16
|
||||
|
||||
## 0.0.62
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.7.15
|
||||
|
||||
## 0.0.61
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.14
|
||||
- jazz-tools@0.7.14
|
||||
- jazz-react@0.7.14
|
||||
|
||||
## 0.0.60
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.13
|
||||
- jazz-react@0.7.13
|
||||
|
||||
## 0.0.59
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.12
|
||||
- jazz-react@0.7.12
|
||||
|
||||
## 0.0.58
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.11
|
||||
- jazz-react@0.7.11
|
||||
- jazz-tools@0.7.11
|
||||
|
||||
## 0.0.57
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.10
|
||||
- jazz-react@0.7.10
|
||||
- jazz-tools@0.7.10
|
||||
|
||||
## 0.0.56
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-example-chat",
|
||||
"private": true,
|
||||
"version": "0.0.56",
|
||||
"version": "0.0.75",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -3,6 +3,7 @@ import { createJazzReactContext, DemoAuth } from "jazz-react";
|
||||
import { createRoot } from "react-dom/client";
|
||||
import { useIframeHashRouter } from "hash-slash";
|
||||
import { ChatScreen } from "./chatScreen.tsx";
|
||||
import { StrictMode } from "react";
|
||||
|
||||
export class Message extends CoMap {
|
||||
text = co.string;
|
||||
@@ -39,4 +40,4 @@ function App() {
|
||||
}
|
||||
|
||||
createRoot(document.getElementById("root")!)
|
||||
.render(<Jazz.Provider><App/></Jazz.Provider>);
|
||||
.render(<StrictMode><Jazz.Provider><App/></Jazz.Provider></StrictMode>);
|
||||
72
examples/inspector/CHANGELOG.md
Normal file
72
examples/inspector/CHANGELOG.md
Normal file
@@ -0,0 +1,72 @@
|
||||
# jazz-example-chat
|
||||
|
||||
## 0.0.54
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.28
|
||||
- cojson-transport-ws@0.7.28
|
||||
|
||||
## 0.0.53
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson-transport-ws@0.7.27
|
||||
|
||||
## 0.0.52
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.26
|
||||
- cojson-transport-ws@0.7.26
|
||||
|
||||
## 0.0.51
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.23
|
||||
- cojson-transport-ws@0.7.23
|
||||
|
||||
## 0.0.50
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson-transport-ws@0.7.22
|
||||
|
||||
## 0.0.49
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.18
|
||||
- cojson-transport-ws@0.7.18
|
||||
|
||||
## 0.0.48
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.17
|
||||
- cojson-transport-ws@0.7.17
|
||||
|
||||
## 0.0.47
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.6.7
|
||||
- jazz-react@0.5.5
|
||||
- jazz-react-auth-local@0.4.18
|
||||
|
||||
## 0.0.46
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.5.0
|
||||
- jazz-react-auth-local@0.4.16
|
||||
17
examples/inspector/index.html
Normal file
17
examples/inspector/index.html
Normal file
@@ -0,0 +1,17 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/png" href="/jazz-logo.png" />
|
||||
<link rel="stylesheet" href="/src/index.css" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Jazz Inspector</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/app.tsx"></script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "inspector",
|
||||
"name": "jazz-inspector",
|
||||
"private": true,
|
||||
"version": "0.0.47",
|
||||
"version": "0.0.54",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -17,9 +17,8 @@
|
||||
"class-variance-authority": "^0.7.0",
|
||||
"clsx": "^2.0.0",
|
||||
"hash-slash": "workspace:*",
|
||||
"jazz-react": "workspace:*",
|
||||
"jazz-react-auth-local": "workspace:*",
|
||||
"cojson": "workspace:*",
|
||||
"cojson-transport-ws": "workspace:*",
|
||||
"lucide-react": "^0.274.0",
|
||||
"qrcode": "^1.5.3",
|
||||
"react": "^18.2.0",
|
||||
|
Before Width: | Height: | Size: 7.3 KiB After Width: | Height: | Size: 7.3 KiB |
4
examples/inspector/src/app.tsx
Normal file
4
examples/inspector/src/app.tsx
Normal file
@@ -0,0 +1,4 @@
|
||||
import ReactDOM from "react-dom/client";
|
||||
import App from "./viewer/new-app";
|
||||
|
||||
ReactDOM.createRoot(document.getElementById("root")!).render(<App />);
|
||||
92
examples/inspector/src/index.css
Normal file
92
examples/inspector/src/index.css
Normal file
@@ -0,0 +1,92 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@layer base {
|
||||
:root {
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 20 14.3% 4.1%;
|
||||
|
||||
--card: 0 0% 100%;
|
||||
--card-foreground: 20 14.3% 4.1%;
|
||||
|
||||
--popover: 0 0% 100%;
|
||||
--popover-foreground: 20 14.3% 4.1%;
|
||||
|
||||
--primary: 24 9.8% 10%;
|
||||
--primary-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--secondary: 60 4.8% 95.9%;
|
||||
--secondary-foreground: 24 9.8% 10%;
|
||||
|
||||
--muted: 60 4.8% 95.9%;
|
||||
--muted-foreground: 25 5.3% 44.7%;
|
||||
|
||||
--accent: 60 4.8% 95.9%;
|
||||
--accent-foreground: 24 9.8% 10%;
|
||||
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--border: 20 5.9% 90%;
|
||||
--input: 20 5.9% 90%;
|
||||
--ring: 20 14.3% 4.1%;
|
||||
|
||||
--radius: 0.5rem;
|
||||
}
|
||||
|
||||
.dark {
|
||||
--background: 20 14.3% 4.1%;
|
||||
--foreground: 60 9.1% 97.8%;
|
||||
|
||||
--card: 20 14.3% 4.1%;
|
||||
--card-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--popover: 20 14.3% 4.1%;
|
||||
--popover-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--primary: 60 9.1% 97.8%;
|
||||
--primary-foreground: 24 9.8% 10%;
|
||||
|
||||
--secondary: 12 6.5% 15.1%;
|
||||
--secondary-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--muted: 12 6.5% 15.1%;
|
||||
--muted-foreground: 24 5.4% 63.9%;
|
||||
|
||||
--accent: 12 6.5% 15.1%;
|
||||
--accent-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--border: 12 6.5% 15.1%;
|
||||
--input: 12 6.5% 15.1%;
|
||||
--ring: 24 5.7% 82.9%;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border;
|
||||
}
|
||||
body {
|
||||
@apply bg-background text-foreground;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.animate-in {
|
||||
animation: slideIn 0.3s ease-out;
|
||||
}
|
||||
@keyframes slideIn {
|
||||
from {
|
||||
transform: translateZ(400px) translateY(30px) scale(1.05);
|
||||
opacity: 0.4;
|
||||
}
|
||||
to {
|
||||
transform: translateZ(0) scale(1);
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
18
examples/inspector/src/link-icon.tsx
Normal file
18
examples/inspector/src/link-icon.tsx
Normal file
@@ -0,0 +1,18 @@
|
||||
export function LinkIcon() {
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
strokeWidth={1.5}
|
||||
stroke="currentColor"
|
||||
className="w-3 h-3"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
d="M13.19 8.688a4.5 4.5 0 0 1 1.242 7.244l-4.5 4.5a4.5 4.5 0 0 1-6.364-6.364l1.757-1.757m13.35-.622 1.757-1.757a4.5 4.5 0 0 0-6.364-6.364l-4.5 4.5a4.5 4.5 0 0 0 1.242 7.244"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
42
examples/inspector/src/viewer/breadcrumbs.tsx
Normal file
42
examples/inspector/src/viewer/breadcrumbs.tsx
Normal file
@@ -0,0 +1,42 @@
|
||||
import React from "react";
|
||||
import { PageInfo } from "./types";
|
||||
|
||||
interface BreadcrumbsProps {
|
||||
path: PageInfo[];
|
||||
onBreadcrumbClick: (index: number) => void;
|
||||
}
|
||||
|
||||
export const Breadcrumbs: React.FC<BreadcrumbsProps> = ({
|
||||
path,
|
||||
onBreadcrumbClick,
|
||||
}) => {
|
||||
return (
|
||||
<div className="z-20 relative bg-indigo-400/10 backdrop-blur-sm rounded-lg inline-flex px-2 py-1 whitespace-pre transition-all items-center space-x-1 min-h-10">
|
||||
<button
|
||||
onClick={() => onBreadcrumbClick(-1)}
|
||||
className="flex items-center justify-center p-1 rounded-sm hover:bg-indigo-500/10 transition-colors"
|
||||
aria-label="Go to home"
|
||||
>
|
||||
<img src="jazz-logo.png" alt="Jazz Logo" className="size-5" />
|
||||
</button>
|
||||
{path.map((page, index) => {
|
||||
return (
|
||||
<span
|
||||
key={index}
|
||||
className="inline-block first:pl-1 last:pr-1"
|
||||
>
|
||||
{index === 0 ? null : (
|
||||
<span className="text-indigo-500/30">{" / "}</span>
|
||||
)}
|
||||
<button
|
||||
onClick={() => onBreadcrumbClick(index)}
|
||||
className="text-indigo-700 hover:underline"
|
||||
>
|
||||
{index === 0 ? page.name || "Root" : page.name}
|
||||
</button>
|
||||
</span>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
353
examples/inspector/src/viewer/co-stream-view.tsx
Normal file
353
examples/inspector/src/viewer/co-stream-view.tsx
Normal file
@@ -0,0 +1,353 @@
|
||||
import {
|
||||
CoID,
|
||||
LocalNode,
|
||||
RawBinaryCoStream,
|
||||
RawCoStream,
|
||||
RawCoValue,
|
||||
} from "cojson";
|
||||
import { JsonObject, JsonValue } from "cojson/src/jsonValue";
|
||||
import { PageInfo } from "./types";
|
||||
import { base64URLtoBytes } from "cojson/src/base64url";
|
||||
import { useEffect, useState } from "react";
|
||||
import { ArrowDownToLine } from "lucide-react";
|
||||
import {
|
||||
BinaryStreamItem,
|
||||
BinaryStreamStart,
|
||||
CoStreamItem,
|
||||
} from "cojson/src/coValues/coStream";
|
||||
import { AccountOrGroupPreview } from "./value-renderer";
|
||||
|
||||
// typeguard for BinaryStreamStart
|
||||
function isBinaryStreamStart(item: unknown): item is BinaryStreamStart {
|
||||
return (
|
||||
typeof item === "object" &&
|
||||
item !== null &&
|
||||
"type" in item &&
|
||||
item.type === "start"
|
||||
);
|
||||
}
|
||||
|
||||
function detectCoStreamType(value: RawCoStream | RawBinaryCoStream) {
|
||||
const firstKey = Object.keys(value.items)[0];
|
||||
if (!firstKey)
|
||||
return {
|
||||
type: "unknown",
|
||||
};
|
||||
|
||||
const items = value.items[firstKey as never]?.map((v) => v.value);
|
||||
|
||||
if (!items)
|
||||
return {
|
||||
type: "unknown",
|
||||
};
|
||||
const firstItem = items[0];
|
||||
if (!firstItem)
|
||||
return {
|
||||
type: "unknown",
|
||||
};
|
||||
// This is a binary stream
|
||||
if (isBinaryStreamStart(firstItem)) {
|
||||
return {
|
||||
type: "binary",
|
||||
items: items as BinaryStreamItem[],
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
type: "coStream",
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function getBlobFromCoStream({
|
||||
items,
|
||||
onlyFirstChunk = false,
|
||||
}: {
|
||||
items: BinaryStreamItem[];
|
||||
onlyFirstChunk?: boolean;
|
||||
}) {
|
||||
if (onlyFirstChunk && items.length > 1) {
|
||||
items = items.slice(0, 2);
|
||||
}
|
||||
|
||||
const chunks: Uint8Array[] = [];
|
||||
|
||||
const binary_U_prefixLength = 8;
|
||||
|
||||
let lastProgressUpdate = Date.now();
|
||||
|
||||
for (const item of items.slice(1)) {
|
||||
if (item.type === "end") {
|
||||
break;
|
||||
}
|
||||
|
||||
if (item.type !== "chunk") {
|
||||
console.error("Invalid binary stream chunk", item);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const chunk = base64URLtoBytes(item.chunk.slice(binary_U_prefixLength));
|
||||
// totalLength += chunk.length;
|
||||
chunks.push(chunk);
|
||||
|
||||
if (Date.now() - lastProgressUpdate > 100) {
|
||||
lastProgressUpdate = Date.now();
|
||||
}
|
||||
}
|
||||
const defaultMime = "mimeType" in items[0] ? items[0].mimeType : null;
|
||||
|
||||
const blob = new Blob(chunks, defaultMime ? { type: defaultMime } : {});
|
||||
|
||||
const mimeType =
|
||||
defaultMime === "" ? await detectPDFMimeType(blob) : defaultMime;
|
||||
|
||||
return {
|
||||
blob,
|
||||
mimeType: mimeType as string,
|
||||
unfinishedChunks: items.length > 1,
|
||||
totalSize:
|
||||
"totalSizeBytes" in items[0]
|
||||
? (items[0].totalSizeBytes as number)
|
||||
: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
const detectPDFMimeType = async (blob: Blob): Promise<string> => {
|
||||
const arrayBuffer = await blob.slice(0, 4).arrayBuffer();
|
||||
const uint8Array = new Uint8Array(arrayBuffer);
|
||||
const header = uint8Array.reduce(
|
||||
(acc, byte) => acc + String.fromCharCode(byte),
|
||||
"",
|
||||
);
|
||||
|
||||
if (header === "%PDF") {
|
||||
return "application/pdf";
|
||||
}
|
||||
return "application/octet-stream";
|
||||
};
|
||||
|
||||
const BinaryDownloadButton = ({
|
||||
pdfBlob,
|
||||
fileName = "document",
|
||||
label,
|
||||
mimeType,
|
||||
}: {
|
||||
pdfBlob: Blob;
|
||||
mimeType?: string;
|
||||
fileName?: string;
|
||||
label: string;
|
||||
}) => {
|
||||
const downloadFile = () => {
|
||||
const url = URL.createObjectURL(
|
||||
new Blob([pdfBlob], mimeType ? { type: mimeType } : {}),
|
||||
);
|
||||
const link = document.createElement("a");
|
||||
link.href = url;
|
||||
link.download =
|
||||
mimeType === "application/pdf" ? `${fileName}.pdf` : fileName;
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
URL.revokeObjectURL(url);
|
||||
};
|
||||
|
||||
return (
|
||||
<button
|
||||
className="flex items-center gap-2 px-2 py-1 text-gray-900 border border-gray-900/10 bg-clip-border shadow-sm transition-colors rounded bg-gray-50 text-sm"
|
||||
onClick={downloadFile}
|
||||
>
|
||||
<ArrowDownToLine size={16} />
|
||||
{label}
|
||||
{/* Download {mimeType === "application/pdf" ? "PDF" : "File"} */}
|
||||
</button>
|
||||
);
|
||||
};
|
||||
|
||||
const LabelContentPair = ({
|
||||
label,
|
||||
content,
|
||||
}: {
|
||||
label: string;
|
||||
content: React.ReactNode;
|
||||
}) => {
|
||||
return (
|
||||
<div className="flex flex-col gap-1.5 ">
|
||||
<span className="uppercase text-xs font-medium text-gray-600 tracking-wide">
|
||||
{label}
|
||||
</span>
|
||||
<span>{content}</span>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
function RenderCoBinaryStream({
|
||||
value,
|
||||
items,
|
||||
}: {
|
||||
items: BinaryStreamItem[];
|
||||
value: RawBinaryCoStream;
|
||||
}) {
|
||||
const [file, setFile] = useState<
|
||||
| {
|
||||
blob: Blob;
|
||||
mimeType: string;
|
||||
unfinishedChunks: boolean;
|
||||
totalSize: number | undefined;
|
||||
}
|
||||
| undefined
|
||||
| null
|
||||
>(null);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
// load only the first chunk to get the mime type and size
|
||||
getBlobFromCoStream({
|
||||
items,
|
||||
onlyFirstChunk: true,
|
||||
})
|
||||
.then((v) => {
|
||||
if (v) {
|
||||
setFile(v);
|
||||
if (v.mimeType.includes("image")) {
|
||||
// If it's an image, load the full blob
|
||||
getBlobFromCoStream({
|
||||
items,
|
||||
}).then((s) => {
|
||||
if (s) setFile(s);
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
.finally(() => setIsLoading(false));
|
||||
}, [items]);
|
||||
|
||||
if (!isLoading && !file) return <div>No blob</div>;
|
||||
|
||||
if (isLoading) return <div>Loading...</div>;
|
||||
if (!file) return <div>No blob</div>;
|
||||
|
||||
const { blob, mimeType } = file;
|
||||
|
||||
const sizeInKB = (file.totalSize || 0) / 1024;
|
||||
|
||||
return (
|
||||
<div className="space-y-8 mt-4">
|
||||
<div className="grid grid-cols-3 gap-2 max-w-3xl">
|
||||
<LabelContentPair
|
||||
label="Mime Type"
|
||||
content={
|
||||
<span className="font-mono bg-gray-100 rounded px-2 py-1 text-sm">
|
||||
{mimeType || "No mime type"}
|
||||
</span>
|
||||
}
|
||||
/>
|
||||
<LabelContentPair
|
||||
label="Size"
|
||||
content={<span>{sizeInKB.toFixed(2)} KB</span>}
|
||||
/>
|
||||
<LabelContentPair
|
||||
label="Download"
|
||||
content={
|
||||
<BinaryDownloadButton
|
||||
fileName={value.id.toString()}
|
||||
pdfBlob={blob}
|
||||
mimeType={mimeType}
|
||||
label={
|
||||
mimeType === "application/pdf"
|
||||
? "Download PDF"
|
||||
: "Download File"
|
||||
}
|
||||
/>
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
{mimeType === "image/png" || mimeType === "image/jpeg" ? (
|
||||
<LabelContentPair
|
||||
label="Preview"
|
||||
content={
|
||||
<div className="bg-gray-50 p-3 rounded-sm">
|
||||
<RenderBlobImage blob={blob} />
|
||||
</div>
|
||||
}
|
||||
/>
|
||||
) : null}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function RenderCoStream({
|
||||
value,
|
||||
node,
|
||||
}: {
|
||||
value: RawCoStream;
|
||||
node: LocalNode;
|
||||
}) {
|
||||
const streamPerUser = Object.keys(value.items);
|
||||
const userCoIds = streamPerUser.map(
|
||||
(stream) => stream.split("_session")[0],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-2">
|
||||
{userCoIds.map((id, idx) => (
|
||||
<div
|
||||
className="bg-gray-100 p-3 rounded-lg transition-colors overflow-hidden bg-white border hover:bg-gray-100/5 cursor-pointer shadow-sm"
|
||||
key={id}
|
||||
>
|
||||
<AccountOrGroupPreview
|
||||
coId={id as CoID<RawCoValue>}
|
||||
node={node}
|
||||
/>
|
||||
{/* @ts-expect-error - TODO: fix types */}
|
||||
{value.items[streamPerUser[idx]]?.map(
|
||||
(item: CoStreamItem<JsonValue>) => (
|
||||
<div>
|
||||
{new Date(item.madeAt).toLocaleString()}{" "}
|
||||
{JSON.stringify(item.value)}
|
||||
</div>
|
||||
),
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function CoStreamView({
|
||||
value,
|
||||
node,
|
||||
}: {
|
||||
data: JsonObject;
|
||||
onNavigate: (pages: PageInfo[]) => void;
|
||||
node: LocalNode;
|
||||
value: RawCoStream;
|
||||
}) {
|
||||
// if (!value) return <div>No value</div>;
|
||||
|
||||
const streamType = detectCoStreamType(value);
|
||||
|
||||
if (streamType.type === "binary") {
|
||||
if (streamType.items === undefined) {
|
||||
return <div>No binary stream</div>;
|
||||
}
|
||||
|
||||
return (
|
||||
<RenderCoBinaryStream
|
||||
value={value as RawBinaryCoStream}
|
||||
items={streamType.items}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
if (streamType.type === "coStream") {
|
||||
return <RenderCoStream value={value} node={node} />;
|
||||
}
|
||||
|
||||
if (streamType.type === "unknown") return <div>Unknown stream type</div>;
|
||||
|
||||
return <div>Unknown stream type</div>;
|
||||
}
|
||||
|
||||
function RenderBlobImage({ blob }: { blob: Blob }) {
|
||||
const urlCreator = window.URL || window.webkitURL;
|
||||
return <img src={urlCreator.createObjectURL(blob)} />;
|
||||
}
|
||||
73
examples/inspector/src/viewer/grid-view.tsx
Normal file
73
examples/inspector/src/viewer/grid-view.tsx
Normal file
@@ -0,0 +1,73 @@
|
||||
import { CoID, LocalNode, RawCoValue } from "cojson";
|
||||
import { JsonObject } from "cojson/src/jsonValue";
|
||||
import { CoMapPreview, ValueRenderer } from "./value-renderer";
|
||||
import clsx from "clsx";
|
||||
import { PageInfo, isCoId } from "./types";
|
||||
import { ResolveIcon } from "./type-icon";
|
||||
|
||||
export function GridView({
|
||||
data,
|
||||
onNavigate,
|
||||
node,
|
||||
}: {
|
||||
data: JsonObject;
|
||||
onNavigate: (pages: PageInfo[]) => void;
|
||||
node: LocalNode;
|
||||
}) {
|
||||
const entries = Object.entries(data);
|
||||
|
||||
return (
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4 p-2">
|
||||
{entries.map(([key, child], childIndex) => (
|
||||
<div
|
||||
key={childIndex}
|
||||
className={clsx(
|
||||
"bg-gray-100 p-3 rounded-lg transition-colors overflow-hidden",
|
||||
isCoId(child)
|
||||
? "bg-white border hover:bg-gray-100/5 cursor-pointer shadow-sm"
|
||||
: "bg-gray-50",
|
||||
)}
|
||||
onClick={() =>
|
||||
isCoId(child) &&
|
||||
onNavigate([
|
||||
{ coId: child as CoID<RawCoValue>, name: key },
|
||||
])
|
||||
}
|
||||
>
|
||||
<h3 className="truncate">
|
||||
{isCoId(child) ? (
|
||||
<span className="font-medium flex justify-between">
|
||||
{key}
|
||||
|
||||
<div className="px-2 py-1 text-xs bg-gray-100 rounded">
|
||||
<ResolveIcon
|
||||
coId={child as CoID<RawCoValue>}
|
||||
node={node}
|
||||
/>
|
||||
</div>
|
||||
</span>
|
||||
) : (
|
||||
<span>{key}</span>
|
||||
)}
|
||||
</h3>
|
||||
<div className="mt-2 text-sm">
|
||||
{isCoId(child) ? (
|
||||
<CoMapPreview
|
||||
coId={child as CoID<RawCoValue>}
|
||||
node={node}
|
||||
/>
|
||||
) : (
|
||||
<ValueRenderer
|
||||
json={child}
|
||||
onCoIDClick={(coId) => {
|
||||
onNavigate([{ coId, name: key }]);
|
||||
}}
|
||||
compact
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
27
examples/inspector/src/viewer/index.tsx
Normal file
27
examples/inspector/src/viewer/index.tsx
Normal file
@@ -0,0 +1,27 @@
|
||||
import { LocalNode } from "cojson";
|
||||
import { Breadcrumbs } from "./breadcrumbs";
|
||||
import { usePagePath } from "./use-page-path";
|
||||
import { PageInfo } from "./types";
|
||||
import { PageStack } from "./page-stack";
|
||||
|
||||
export default function CoJsonViewer({
|
||||
defaultPath,
|
||||
node,
|
||||
}: {
|
||||
defaultPath?: PageInfo[];
|
||||
node: LocalNode;
|
||||
}) {
|
||||
const { path, addPages, goToIndex, goBack } = usePagePath(defaultPath);
|
||||
|
||||
return (
|
||||
<div className="w-full h-screen bg-gray-100 p-4 overflow-hidden">
|
||||
<Breadcrumbs path={path} onBreadcrumbClick={goToIndex} />
|
||||
<PageStack
|
||||
path={path}
|
||||
node={node}
|
||||
goBack={goBack}
|
||||
addPages={addPages}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
310
examples/inspector/src/viewer/new-app.tsx
Normal file
310
examples/inspector/src/viewer/new-app.tsx
Normal file
@@ -0,0 +1,310 @@
|
||||
import React, { useState, useEffect } from "react";
|
||||
import {
|
||||
LocalNode,
|
||||
CoID,
|
||||
RawCoValue,
|
||||
RawAccount,
|
||||
AgentSecret,
|
||||
AccountID,
|
||||
cojsonInternals,
|
||||
WasmCrypto,
|
||||
} from "cojson";
|
||||
import { createWebSocketPeer } from "cojson-transport-ws";
|
||||
import { Trash2 } from "lucide-react";
|
||||
import { Breadcrumbs } from "./breadcrumbs";
|
||||
import { usePagePath } from "./use-page-path";
|
||||
import { PageStack } from "./page-stack";
|
||||
import { resolveCoValue, useResolvedCoValue } from "./use-resolve-covalue";
|
||||
import clsx from "clsx";
|
||||
|
||||
interface Account {
|
||||
id: CoID<RawAccount>;
|
||||
secret: AgentSecret;
|
||||
}
|
||||
|
||||
export default function CoJsonViewerApp() {
|
||||
const [accounts, setAccounts] = useState<Account[]>(() => {
|
||||
const storedAccounts = localStorage.getItem("inspectorAccounts");
|
||||
return storedAccounts ? JSON.parse(storedAccounts) : [];
|
||||
});
|
||||
const [currentAccount, setCurrentAccount] = useState<Account | null>(() => {
|
||||
const lastSelectedId = localStorage.getItem("lastSelectedAccountId");
|
||||
if (lastSelectedId) {
|
||||
const lastAccount = accounts.find(
|
||||
(account) => account.id === lastSelectedId,
|
||||
);
|
||||
return lastAccount || null;
|
||||
}
|
||||
return null;
|
||||
});
|
||||
const [localNode, setLocalNode] = useState<LocalNode | null>(null);
|
||||
const [coValueId, setCoValueId] = useState<CoID<RawCoValue> | "">("");
|
||||
const { path, addPages, goToIndex, goBack, setPage } = usePagePath();
|
||||
|
||||
useEffect(() => {
|
||||
localStorage.setItem("inspectorAccounts", JSON.stringify(accounts));
|
||||
}, [accounts]);
|
||||
|
||||
useEffect(() => {
|
||||
if (currentAccount) {
|
||||
localStorage.setItem("lastSelectedAccountId", currentAccount.id);
|
||||
} else {
|
||||
localStorage.removeItem("lastSelectedAccountId");
|
||||
}
|
||||
}, [currentAccount]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!currentAccount) {
|
||||
setLocalNode(null);
|
||||
goToIndex(-1);
|
||||
return;
|
||||
}
|
||||
|
||||
WasmCrypto.create().then(async (crypto) => {
|
||||
const wsPeer = createWebSocketPeer({
|
||||
id: "mesh",
|
||||
websocket: new WebSocket("wss://mesh.jazz.tools"),
|
||||
role: "server",
|
||||
});
|
||||
const node = await LocalNode.withLoadedAccount({
|
||||
accountID: currentAccount.id,
|
||||
accountSecret: currentAccount.secret,
|
||||
sessionID: cojsonInternals.newRandomSessionID(
|
||||
currentAccount.id,
|
||||
),
|
||||
peersToLoadFrom: [wsPeer],
|
||||
crypto,
|
||||
migration: async () => {
|
||||
console.log("Not running any migration in inspector");
|
||||
},
|
||||
});
|
||||
setLocalNode(node);
|
||||
});
|
||||
}, [currentAccount, goToIndex]);
|
||||
|
||||
const addAccount = (id: AccountID, secret: AgentSecret) => {
|
||||
const newAccount = { id, secret };
|
||||
setAccounts([...accounts, newAccount]);
|
||||
setCurrentAccount(newAccount);
|
||||
};
|
||||
|
||||
const deleteCurrentAccount = () => {
|
||||
if (currentAccount) {
|
||||
const updatedAccounts = accounts.filter(
|
||||
(account) => account.id !== currentAccount.id,
|
||||
);
|
||||
setAccounts(updatedAccounts);
|
||||
setCurrentAccount(
|
||||
updatedAccounts.length > 0 ? updatedAccounts[0] : null,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCoValueIdSubmit = (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
if (coValueId) {
|
||||
setPage(coValueId);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="w-full h-screen bg-gray-100 p-4 overflow-hidden flex flex-col">
|
||||
<div className="flex justify-between items-center mb-4">
|
||||
<Breadcrumbs path={path} onBreadcrumbClick={goToIndex} />
|
||||
<AccountSwitcher
|
||||
accounts={accounts}
|
||||
currentAccount={currentAccount}
|
||||
setCurrentAccount={setCurrentAccount}
|
||||
deleteCurrentAccount={deleteCurrentAccount}
|
||||
localNode={localNode}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<PageStack
|
||||
path={path}
|
||||
node={localNode}
|
||||
goBack={goBack}
|
||||
addPages={addPages}
|
||||
>
|
||||
{!currentAccount ? (
|
||||
<AddAccountForm addAccount={addAccount} />
|
||||
) : (
|
||||
<form
|
||||
onSubmit={handleCoValueIdSubmit}
|
||||
aria-hidden={path.length !== 0}
|
||||
className={clsx(
|
||||
"flex flex-col justify-center items-center gap-2 h-full w-full mb-20 ",
|
||||
"transition-all duration-150",
|
||||
path.length > 0
|
||||
? "opacity-0 -translate-y-2 scale-95"
|
||||
: "opacity-100",
|
||||
)}
|
||||
>
|
||||
<fieldset className="flex flex-col gap-2 text-sm">
|
||||
<h2 className="text-3xl font-medium text-gray-950 text-center mb-4">
|
||||
Jazz CoValue Inspector
|
||||
</h2>
|
||||
<input
|
||||
className="border p-4 rounded-lg min-w-[21rem] font-mono"
|
||||
placeholder="co_z1234567890abcdef123456789"
|
||||
value={coValueId}
|
||||
onChange={(e) =>
|
||||
setCoValueId(
|
||||
e.target.value as CoID<RawCoValue>,
|
||||
)
|
||||
}
|
||||
/>
|
||||
<button
|
||||
type="submit"
|
||||
className="bg-indigo-500 hover:bg-indigo-500/80 text-white px-4 py-2 rounded-md"
|
||||
>
|
||||
Inspect
|
||||
</button>
|
||||
<hr />
|
||||
<button
|
||||
type="button"
|
||||
className="border inline-block px-2 py-1.5 text-black rounded"
|
||||
onClick={() => {
|
||||
setCoValueId(currentAccount.id);
|
||||
setPage(currentAccount.id);
|
||||
}}
|
||||
>
|
||||
Inspect My Account
|
||||
</button>
|
||||
</fieldset>
|
||||
</form>
|
||||
)}
|
||||
</PageStack>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function AccountSwitcher({
|
||||
accounts,
|
||||
currentAccount,
|
||||
setCurrentAccount,
|
||||
deleteCurrentAccount,
|
||||
localNode,
|
||||
}: {
|
||||
accounts: Account[];
|
||||
currentAccount: Account | null;
|
||||
setCurrentAccount: (account: Account | null) => void;
|
||||
deleteCurrentAccount: () => void;
|
||||
localNode: LocalNode | null;
|
||||
}) {
|
||||
return (
|
||||
<div className="relative flex items-center gap-1">
|
||||
<select
|
||||
value={currentAccount?.id || "add-account"}
|
||||
onChange={(e) => {
|
||||
if (e.target.value === "add-account") {
|
||||
setCurrentAccount(null);
|
||||
} else {
|
||||
const account = accounts.find(
|
||||
(a) => a.id === e.target.value,
|
||||
);
|
||||
setCurrentAccount(account || null);
|
||||
}
|
||||
}}
|
||||
className="p-2 px-4 bg-gray-100/50 border border-indigo-500/10 backdrop-blur-sm rounded-md text-indigo-700 appearance-none"
|
||||
>
|
||||
{accounts.map((account) => (
|
||||
<option key={account.id} value={account.id}>
|
||||
{localNode ? (
|
||||
<AccountNameDisplay
|
||||
accountId={account.id}
|
||||
node={localNode}
|
||||
/>
|
||||
) : (
|
||||
account.id
|
||||
)}
|
||||
</option>
|
||||
))}
|
||||
<option value="add-account">Add account</option>
|
||||
</select>
|
||||
{currentAccount && (
|
||||
<button
|
||||
onClick={deleteCurrentAccount}
|
||||
className="p-3 rounded hover:bg-gray-200 transition-colors"
|
||||
title="Delete Account"
|
||||
>
|
||||
<Trash2 size={16} className="text-gray-500" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function AddAccountForm({
|
||||
addAccount,
|
||||
}: {
|
||||
addAccount: (id: AccountID, secret: AgentSecret) => void;
|
||||
}) {
|
||||
const [id, setId] = useState("");
|
||||
const [secret, setSecret] = useState("");
|
||||
|
||||
const handleSubmit = (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
addAccount(id as AccountID, secret as AgentSecret);
|
||||
setId("");
|
||||
setSecret("");
|
||||
};
|
||||
|
||||
return (
|
||||
<form
|
||||
onSubmit={handleSubmit}
|
||||
className="flex flex-col gap-2 max-w-md mx-auto h-full justify-center"
|
||||
>
|
||||
<h2 className="text-2xl font-medium text-gray-900 mb-3">
|
||||
Add an Account to Inspect
|
||||
</h2>
|
||||
<input
|
||||
className="border py-2 px-3 rounded-md"
|
||||
placeholder="Account ID"
|
||||
value={id}
|
||||
onChange={(e) => setId(e.target.value)}
|
||||
/>
|
||||
<input
|
||||
type="password"
|
||||
className="border py-2 px-3 rounded-md"
|
||||
placeholder="Account Secret"
|
||||
value={secret}
|
||||
onChange={(e) => setSecret(e.target.value)}
|
||||
/>
|
||||
<button
|
||||
type="submit"
|
||||
className="bg-indigo-500 text-white px-4 py-2 rounded-md"
|
||||
>
|
||||
Add Account
|
||||
</button>
|
||||
</form>
|
||||
);
|
||||
}
|
||||
|
||||
function AccountNameDisplay({
|
||||
accountId,
|
||||
node,
|
||||
}: {
|
||||
accountId: CoID<RawAccount>;
|
||||
node: LocalNode;
|
||||
}) {
|
||||
const { snapshot } = useResolvedCoValue(accountId, node);
|
||||
const [name, setName] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (snapshot && typeof snapshot === "object" && "profile" in snapshot) {
|
||||
const profileId = snapshot.profile as CoID<RawCoValue>;
|
||||
resolveCoValue(profileId, node).then((profileResult) => {
|
||||
if (
|
||||
profileResult.snapshot &&
|
||||
typeof profileResult.snapshot === "object" &&
|
||||
"name" in profileResult.snapshot
|
||||
) {
|
||||
setName(profileResult.snapshot.name as string);
|
||||
}
|
||||
});
|
||||
}
|
||||
}, [snapshot, node]);
|
||||
|
||||
return name ? `${name} <${accountId}>` : accountId;
|
||||
}
|
||||
55
examples/inspector/src/viewer/page-stack.tsx
Normal file
55
examples/inspector/src/viewer/page-stack.tsx
Normal file
@@ -0,0 +1,55 @@
|
||||
import { Page } from "./page"; // Assuming you have a Page component
|
||||
import { CoID, LocalNode, RawCoValue } from "cojson";
|
||||
|
||||
// Define the structure of a page in the path
|
||||
interface PageInfo {
|
||||
coId: CoID<RawCoValue>;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
// Props for the PageStack component
|
||||
interface PageStackProps {
|
||||
path: PageInfo[];
|
||||
node?: LocalNode | null;
|
||||
goBack: () => void;
|
||||
addPages: (pages: PageInfo[]) => void;
|
||||
children?: React.ReactNode;
|
||||
}
|
||||
|
||||
export function PageStack({
|
||||
path,
|
||||
node,
|
||||
goBack,
|
||||
addPages,
|
||||
children,
|
||||
}: PageStackProps) {
|
||||
return (
|
||||
<div className="relative mt-4 h-[calc(100vh-6rem)]">
|
||||
{children && (
|
||||
<div className="absolute inset-0 pb-20">{children}</div>
|
||||
)}
|
||||
{node &&
|
||||
path.map((page, index) => (
|
||||
<Page
|
||||
key={`${page.coId}-${index}`}
|
||||
coId={page.coId}
|
||||
node={node}
|
||||
name={page.name || page.coId}
|
||||
onHeaderClick={goBack}
|
||||
onNavigate={addPages}
|
||||
isTopLevel={index === path.length - 1}
|
||||
style={{
|
||||
transform: `translateZ(${(index - path.length + 1) * 200}px) scale(${
|
||||
1 - (path.length - index - 1) * 0.05
|
||||
}) translateY(${-(index - path.length + 1) * -4}%)`,
|
||||
opacity: 1 - (path.length - index - 1) * 0.05,
|
||||
zIndex: index,
|
||||
transitionProperty: "transform, opacity",
|
||||
transitionDuration: "0.3s",
|
||||
transitionTimingFunction: "ease-out",
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
154
examples/inspector/src/viewer/page.tsx
Normal file
154
examples/inspector/src/viewer/page.tsx
Normal file
@@ -0,0 +1,154 @@
|
||||
import clsx from "clsx";
|
||||
import { CoID, LocalNode, RawCoStream, RawCoValue } from "cojson";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useResolvedCoValue } from "./use-resolve-covalue";
|
||||
import { GridView } from "./grid-view";
|
||||
import { PageInfo } from "./types";
|
||||
import { TableView } from "./table-viewer";
|
||||
import { TypeIcon } from "./type-icon";
|
||||
import { CoStreamView } from "./co-stream-view";
|
||||
import { AccountOrGroupPreview } from "./value-renderer";
|
||||
|
||||
type PageProps = {
|
||||
coId: CoID<RawCoValue>;
|
||||
node: LocalNode;
|
||||
name: string;
|
||||
onNavigate: (newPages: PageInfo[]) => void;
|
||||
onHeaderClick?: () => void;
|
||||
isTopLevel?: boolean;
|
||||
style: React.CSSProperties;
|
||||
};
|
||||
|
||||
export function Page({
|
||||
coId,
|
||||
node,
|
||||
name,
|
||||
onNavigate,
|
||||
onHeaderClick,
|
||||
style,
|
||||
isTopLevel,
|
||||
}: PageProps) {
|
||||
const { value, snapshot, type, extendedType } = useResolvedCoValue(
|
||||
coId,
|
||||
node,
|
||||
);
|
||||
const [viewMode, setViewMode] = useState<"grid" | "table">("grid");
|
||||
|
||||
const supportsTableView = type === "colist" || extendedType === "record";
|
||||
|
||||
// Automatically switch to table view if the page is a CoMap record
|
||||
useEffect(() => {
|
||||
if (supportsTableView) {
|
||||
setViewMode("table");
|
||||
}
|
||||
}, [supportsTableView]);
|
||||
|
||||
if (snapshot === "unavailable") {
|
||||
return <div style={style}>Data unavailable</div>;
|
||||
}
|
||||
|
||||
if (!snapshot) {
|
||||
return <div style={style}></div>;
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
style={style}
|
||||
className={clsx(
|
||||
"absolute inset-0 border border-gray-900/5 bg-clip-padding bg-white rounded-xl shadow-lg p-6 animate-in",
|
||||
)}
|
||||
>
|
||||
{!isTopLevel && (
|
||||
<div
|
||||
className="absolute inset-x-0 top-0 h-10"
|
||||
aria-label="Back"
|
||||
onClick={() => {
|
||||
onHeaderClick?.();
|
||||
}}
|
||||
aria-hidden="true"
|
||||
></div>
|
||||
)}
|
||||
<div className="flex justify-between items-center mb-4">
|
||||
<div className="flex flex-col gap-2">
|
||||
<h2 className="text-2xl font-bold flex items-start flex-col gap-1">
|
||||
<span>
|
||||
{name}
|
||||
{typeof snapshot === "object" &&
|
||||
"name" in snapshot ? (
|
||||
<span className="text-gray-600 font-medium">
|
||||
{" "}
|
||||
{
|
||||
(
|
||||
snapshot as {
|
||||
name: string;
|
||||
}
|
||||
).name
|
||||
}
|
||||
</span>
|
||||
) : null}
|
||||
</span>
|
||||
</h2>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-xs text-gray-700 font-medium py-0.5 px-1 -ml-0.5 rounded bg-gray-700/5 inline-block font-mono">
|
||||
{type && (
|
||||
<TypeIcon
|
||||
type={type}
|
||||
extendedType={extendedType}
|
||||
/>
|
||||
)}
|
||||
</span>
|
||||
<span className="text-xs text-gray-700 font-medium py-0.5 px-1 -ml-0.5 rounded bg-gray-700/5 inline-block font-mono">
|
||||
{coId}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
{/* {supportsTableView && (
|
||||
<button
|
||||
onClick={toggleViewMode}
|
||||
className="px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-600 transition-colors"
|
||||
>
|
||||
{viewMode === "grid" ? "Table View" : "Grid View"}
|
||||
</button>
|
||||
)} */}
|
||||
</div>
|
||||
<div className="overflow-auto max-h-[calc(100%-4rem)]">
|
||||
{type === "costream" ? (
|
||||
<CoStreamView
|
||||
data={snapshot}
|
||||
onNavigate={onNavigate}
|
||||
node={node}
|
||||
value={value as RawCoStream}
|
||||
/>
|
||||
) : viewMode === "grid" ? (
|
||||
<GridView
|
||||
data={snapshot}
|
||||
onNavigate={onNavigate}
|
||||
node={node}
|
||||
/>
|
||||
) : (
|
||||
<TableView
|
||||
data={snapshot}
|
||||
node={node}
|
||||
onNavigate={onNavigate}
|
||||
/>
|
||||
)}
|
||||
{/* --- */}
|
||||
{extendedType !== "account" && extendedType !== "group" && (
|
||||
<div className="text-xs text-gray-500 mt-4">
|
||||
Owned by{" "}
|
||||
<AccountOrGroupPreview
|
||||
coId={value.group.id}
|
||||
node={node}
|
||||
showId
|
||||
onClick={() => {
|
||||
onNavigate([
|
||||
{ coId: value.group.id, name: "owner" },
|
||||
]);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
142
examples/inspector/src/viewer/table-viewer.tsx
Normal file
142
examples/inspector/src/viewer/table-viewer.tsx
Normal file
@@ -0,0 +1,142 @@
|
||||
import { CoID, LocalNode, RawCoValue } from "cojson";
|
||||
import { JsonObject } from "cojson/src/jsonValue";
|
||||
import { PageInfo } from "./types";
|
||||
import { useMemo, useState } from "react";
|
||||
import { ValueRenderer } from "./value-renderer";
|
||||
import { LinkIcon } from "../link-icon";
|
||||
import { useResolvedCoValues } from "./use-resolve-covalue";
|
||||
|
||||
export function TableView({
|
||||
data,
|
||||
node,
|
||||
onNavigate,
|
||||
}: {
|
||||
data: JsonObject;
|
||||
node: LocalNode;
|
||||
onNavigate: (pages: PageInfo[]) => void;
|
||||
}) {
|
||||
const [visibleRowsCount, setVisibleRowsCount] = useState(10);
|
||||
const [coIdArray, visibleRows] = useMemo(() => {
|
||||
const coIdArray = Array.isArray(data)
|
||||
? data
|
||||
: Object.values(data).every(
|
||||
(k) => typeof k === "string" && k.startsWith("co_"),
|
||||
)
|
||||
? Object.values(data).map((k) => k as CoID<RawCoValue>)
|
||||
: [];
|
||||
|
||||
const visibleRows = coIdArray.slice(0, visibleRowsCount);
|
||||
|
||||
return [coIdArray, visibleRows];
|
||||
}, [data, visibleRowsCount]);
|
||||
const resolvedRows = useResolvedCoValues(visibleRows, node);
|
||||
|
||||
const hasMore = visibleRowsCount < coIdArray.length;
|
||||
|
||||
if (!coIdArray.length) {
|
||||
return <div>No data to display</div>;
|
||||
}
|
||||
|
||||
if (resolvedRows.length === 0) {
|
||||
return <div>Loading...</div>;
|
||||
}
|
||||
|
||||
const keys = Array.from(
|
||||
new Set(
|
||||
resolvedRows.flatMap((item) => Object.keys(item.snapshot || {})),
|
||||
),
|
||||
);
|
||||
|
||||
const loadMore = () => {
|
||||
setVisibleRowsCount((prevVisibleRows) => prevVisibleRows + 10);
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<table className="min-w-full divide-y divide-gray-200">
|
||||
<thead className="sticky top-0 border-b">
|
||||
<tr>
|
||||
{["", ...keys].map((key) => (
|
||||
<th
|
||||
key={key}
|
||||
className="px-4 py-3 bg-gray-50 text-left text-xs font-medium text-gray-500 rounded"
|
||||
>
|
||||
{key}
|
||||
</th>
|
||||
))}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody className="bg-white divide-y divide-gray-200">
|
||||
{resolvedRows
|
||||
.slice(0, visibleRowsCount)
|
||||
.map((item, index) => (
|
||||
<tr key={index}>
|
||||
<td className="px-1 py-0">
|
||||
<button
|
||||
onClick={() =>
|
||||
onNavigate([
|
||||
{
|
||||
coId: item.value!.id,
|
||||
name: index.toString(),
|
||||
},
|
||||
])
|
||||
}
|
||||
className="px-4 py-4 whitespace-nowrap text-sm text-gray-500 hover:text-blue-500 hover:bg-gray-100 rounded"
|
||||
>
|
||||
<LinkIcon />
|
||||
</button>
|
||||
</td>
|
||||
{keys.map((key) => (
|
||||
<td
|
||||
key={key}
|
||||
className="px-4 py-4 whitespace-nowrap text-sm text-gray-500"
|
||||
>
|
||||
<ValueRenderer
|
||||
json={
|
||||
(item.snapshot as JsonObject)[
|
||||
key
|
||||
]
|
||||
}
|
||||
onCoIDClick={(coId) => {
|
||||
async function handleClick() {
|
||||
onNavigate([
|
||||
{
|
||||
coId: item.value!
|
||||
.id,
|
||||
name: index.toString(),
|
||||
},
|
||||
{
|
||||
coId: coId,
|
||||
name: key,
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
handleClick();
|
||||
}}
|
||||
/>
|
||||
</td>
|
||||
))}
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
<div className="py-4 text-gray-500 flex items-center justify-between gap-2">
|
||||
<span>
|
||||
Showing {Math.min(visibleRowsCount, coIdArray.length)} of{" "}
|
||||
{coIdArray.length}
|
||||
</span>
|
||||
{hasMore && (
|
||||
<div className="text-center">
|
||||
<button
|
||||
onClick={loadMore}
|
||||
className="px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-600"
|
||||
>
|
||||
Load More
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
47
examples/inspector/src/viewer/type-icon.tsx
Normal file
47
examples/inspector/src/viewer/type-icon.tsx
Normal file
@@ -0,0 +1,47 @@
|
||||
import { CoID, LocalNode, RawCoValue } from "cojson";
|
||||
import {
|
||||
CoJsonType,
|
||||
ExtendedCoJsonType,
|
||||
useResolvedCoValue,
|
||||
} from "./use-resolve-covalue";
|
||||
|
||||
export const TypeIcon = ({
|
||||
type,
|
||||
extendedType,
|
||||
}: {
|
||||
type: CoJsonType;
|
||||
extendedType?: ExtendedCoJsonType;
|
||||
}) => {
|
||||
const iconMap: Record<ExtendedCoJsonType | CoJsonType, string> = {
|
||||
record: "{} Record",
|
||||
image: "🖼️ Image",
|
||||
comap: "{} CoMap",
|
||||
costream: "≋ CoStream",
|
||||
colist: "☰ CoList",
|
||||
account: "👤 Account",
|
||||
group: "👥 Group",
|
||||
};
|
||||
|
||||
const iconKey = extendedType || type;
|
||||
const icon = iconMap[iconKey as keyof typeof iconMap];
|
||||
|
||||
return icon ? <span className="font-mono">{icon}</span> : null;
|
||||
};
|
||||
|
||||
export const ResolveIcon = ({
|
||||
coId,
|
||||
node,
|
||||
}: {
|
||||
coId: CoID<RawCoValue>;
|
||||
node: LocalNode;
|
||||
}) => {
|
||||
const { type, extendedType, snapshot } = useResolvedCoValue(coId, node);
|
||||
|
||||
if (snapshot === "unavailable" && !type) {
|
||||
return <div className="text-gray-600 font-medium">Unavailable</div>;
|
||||
}
|
||||
|
||||
if (!type) return <div className="whitespace-pre w-14 font-mono"> </div>;
|
||||
|
||||
return <TypeIcon type={type} extendedType={extendedType} />;
|
||||
};
|
||||
9
examples/inspector/src/viewer/types.ts
Normal file
9
examples/inspector/src/viewer/types.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { CoID, RawCoValue } from "cojson";
|
||||
|
||||
export type PageInfo = {
|
||||
coId: CoID<RawCoValue>;
|
||||
name?: string;
|
||||
};
|
||||
|
||||
export const isCoId = (coId: unknown): coId is CoID<RawCoValue> =>
|
||||
typeof coId === "string" && coId.startsWith("co_");
|
||||
107
examples/inspector/src/viewer/use-page-path.ts
Normal file
107
examples/inspector/src/viewer/use-page-path.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { PageInfo } from "./types";
|
||||
import { CoID, RawCoValue } from "cojson";
|
||||
|
||||
export function usePagePath(defaultPath?: PageInfo[]) {
|
||||
const [path, setPath] = useState<PageInfo[]>(() => {
|
||||
const hash = window.location.hash.slice(2); // Remove '#/'
|
||||
if (hash) {
|
||||
try {
|
||||
return decodePathFromHash(hash);
|
||||
} catch (e) {
|
||||
console.error("Failed to parse hash:", e);
|
||||
}
|
||||
}
|
||||
return defaultPath || [];
|
||||
});
|
||||
|
||||
const updatePath = useCallback((newPath: PageInfo[]) => {
|
||||
setPath(newPath);
|
||||
const hash = encodePathToHash(newPath);
|
||||
window.location.hash = `#/${hash}`;
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const handleHashChange = () => {
|
||||
const hash = window.location.hash.slice(2);
|
||||
if (hash) {
|
||||
try {
|
||||
const newPath = decodePathFromHash(hash);
|
||||
setPath(newPath);
|
||||
} catch (e) {
|
||||
console.error("Failed to parse hash:", e);
|
||||
}
|
||||
} else if (defaultPath) {
|
||||
setPath(defaultPath);
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener("hashchange", handleHashChange);
|
||||
return () => window.removeEventListener("hashchange", handleHashChange);
|
||||
}, [defaultPath]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
defaultPath &&
|
||||
JSON.stringify(path) !== JSON.stringify(defaultPath)
|
||||
) {
|
||||
updatePath(defaultPath);
|
||||
}
|
||||
}, [defaultPath, path, updatePath]);
|
||||
|
||||
const addPages = useCallback(
|
||||
(newPages: PageInfo[]) => {
|
||||
updatePath([...path, ...newPages]);
|
||||
},
|
||||
[path, updatePath],
|
||||
);
|
||||
|
||||
const goToIndex = useCallback(
|
||||
(index: number) => {
|
||||
updatePath(path.slice(0, index + 1));
|
||||
},
|
||||
[path, updatePath],
|
||||
);
|
||||
|
||||
const setPage = useCallback(
|
||||
(coId: CoID<RawCoValue>) => {
|
||||
updatePath([{ coId, name: "Root" }]);
|
||||
},
|
||||
[updatePath],
|
||||
);
|
||||
|
||||
const goBack = useCallback(() => {
|
||||
if (path.length > 1) {
|
||||
updatePath(path.slice(0, path.length - 1));
|
||||
}
|
||||
}, [path, updatePath]);
|
||||
|
||||
return {
|
||||
path,
|
||||
setPage,
|
||||
addPages,
|
||||
goToIndex,
|
||||
goBack,
|
||||
};
|
||||
}
|
||||
|
||||
function encodePathToHash(path: PageInfo[]): string {
|
||||
return path
|
||||
.map((page) => {
|
||||
if (page.name && page.name !== "Root") {
|
||||
return `${page.coId}:${encodeURIComponent(page.name)}`;
|
||||
}
|
||||
return page.coId;
|
||||
})
|
||||
.join("/");
|
||||
}
|
||||
|
||||
function decodePathFromHash(hash: string): PageInfo[] {
|
||||
return hash.split("/").map((segment) => {
|
||||
const [coId, encodedName] = segment.split(":");
|
||||
return {
|
||||
coId,
|
||||
name: encodedName ? decodeURIComponent(encodedName) : undefined,
|
||||
} as PageInfo;
|
||||
});
|
||||
}
|
||||
152
examples/inspector/src/viewer/use-resolve-covalue.ts
Normal file
152
examples/inspector/src/viewer/use-resolve-covalue.ts
Normal file
@@ -0,0 +1,152 @@
import { CoID, LocalNode, RawBinaryCoStream, RawCoValue } from "cojson";
import { useEffect, useState } from "react";

export type CoJsonType = "comap" | "costream" | "colist";
export type ExtendedCoJsonType = "image" | "record" | "account" | "group";

type JSON = string | number | boolean | null | JSON[] | { [key: string]: JSON };
type JSONObject = { [key: string]: JSON };

type ResolvedImageDefinition = {
    originalSize: [number, number];
    placeholderDataURL?: string;
    [res: `${number}x${number}`]: RawBinaryCoStream["id"];
};

// Type guard for browser image
export const isBrowserImage = (
    coValue: JSONObject,
): coValue is ResolvedImageDefinition => {
    return "originalSize" in coValue && "placeholderDataURL" in coValue;
};

export type ResolvedGroup = {
    readKey: string;
    [key: string]: JSON;
};

export const isGroup = (coValue: JSONObject): coValue is ResolvedGroup => {
    return "readKey" in coValue;
};

export type ResolvedAccount = {
    profile: {
        name: string;
    };
    [key: string]: JSON;
};

export const isAccount = (coValue: JSONObject): coValue is ResolvedAccount => {
    return isGroup(coValue) && "profile" in coValue;
};

export async function resolveCoValue(
    coValueId: CoID<RawCoValue>,
    node: LocalNode,
): Promise<
    | {
          value: RawCoValue;
          snapshot: JSONObject;
          type: CoJsonType | null;
          extendedType: ExtendedCoJsonType | undefined;
      }
    | {
          value: undefined;
          snapshot: "unavailable";
          type: null;
          extendedType: undefined;
      }
> {
    const value = await node.load(coValueId);

    if (value === "unavailable") {
        return {
            value: undefined,
            snapshot: "unavailable",
            type: null,
            extendedType: undefined,
        };
    }

    const snapshot = value.toJSON() as JSONObject;
    const type = value.type as CoJsonType;

    // Determine extended type
    let extendedType: ExtendedCoJsonType | undefined;

    if (type === "comap") {
        if (isBrowserImage(snapshot)) {
            extendedType = "image";
        } else if (isAccount(snapshot)) {
            extendedType = "account";
        } else if (isGroup(snapshot)) {
            extendedType = "group";
        } else {
            // This check is a bit of a hack
            // There might be a better way to do this
            const children = Object.values(snapshot).slice(0, 10);
            if (
                children.every(
                    (c) => typeof c === "string" && c.startsWith("co_"),
                ) &&
                children.length > 3
            ) {
                extendedType = "record";
            }
        }
    }

    return {
        value,
        snapshot,
        type,
        extendedType,
    };
}

export function useResolvedCoValue(
    coValueId: CoID<RawCoValue>,
    node: LocalNode,
) {
    const [result, setResult] =
        useState<Awaited<ReturnType<typeof resolveCoValue>>>();

    useEffect(() => {
        resolveCoValue(coValueId, node).then(setResult);
    }, [coValueId, node]);

    return (
        result || {
            value: undefined,
            snapshot: undefined,
            type: undefined,
            extendedType: undefined,
        }
    );
}

export function useResolvedCoValues(
    coValueIds: CoID<RawCoValue>[],
    node: LocalNode,
) {
    const [results, setResults] = useState<
        Awaited<ReturnType<typeof resolveCoValue>>[]
    >([]);

    useEffect(() => {
        console.log("REFETCHING", coValueIds);
        const fetchResults = async () => {
            if (coValueIds.length === 0) return;
            const resolvedValues = await Promise.all(
                coValueIds.map((coValueId) => resolveCoValue(coValueId, node)),
            );

            console.log({ resolvedValues });
            setResults(resolvedValues);
        };

        fetchResults();
    }, [coValueIds, node]);

    return results;
}
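A minimal usage sketch for useResolvedCoValue (hypothetical component, not part of this diff; it assumes the same cojson imports as the file above). The real call sites are in value-renderer.tsx below:

function CoValueBadge({ coId, node }: { coId: CoID<RawCoValue>; node: LocalNode }) {
    const { snapshot, type, extendedType } = useResolvedCoValue(coId, node);

    if (!snapshot) return <span>Loading…</span>;
    if (snapshot === "unavailable") return <span>Unavailable</span>;

    // e.g. "comap (account)" or just "colist"
    return <span>{type}{extendedType ? ` (${extendedType})` : ""}</span>;
}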
248
examples/inspector/src/viewer/value-renderer.tsx
Normal file
@@ -0,0 +1,248 @@
|
||||
import clsx from "clsx";
|
||||
import { CoID, JsonValue, LocalNode, RawCoValue } from "cojson";
|
||||
import { LinkIcon } from "../link-icon";
|
||||
import {
|
||||
isBrowserImage,
|
||||
resolveCoValue,
|
||||
useResolvedCoValue,
|
||||
} from "./use-resolve-covalue";
|
||||
import React, { useEffect, useState } from "react";
|
||||
|
||||
// Is there a chance we can pass the actual CoValue here?
|
||||
export function ValueRenderer({
|
||||
json,
|
||||
compact,
|
||||
onCoIDClick,
|
||||
}: {
|
||||
json: JsonValue | undefined;
|
||||
compact?: boolean;
|
||||
onCoIDClick?: (childNode: CoID<RawCoValue>) => void;
|
||||
}) {
|
||||
if (json === undefined) {
|
||||
return <span className="text-gray-400">undefined</span>;
|
||||
}
|
||||
|
||||
if (json === null) {
|
||||
return <span className="text-gray-400">null</span>;
|
||||
}
|
||||
|
||||
if (typeof json === "string" && json.startsWith("co_")) {
|
||||
return (
|
||||
<span
|
||||
className={clsx(
|
||||
"inline-flex gap-1 items-center",
|
||||
onCoIDClick &&
|
||||
"text-blue-500 cursor-pointer hover:underline",
|
||||
)}
|
||||
onClick={() => {
|
||||
onCoIDClick?.(json as CoID<RawCoValue>);
|
||||
}}
|
||||
>
|
||||
{json}
|
||||
{onCoIDClick && <LinkIcon />}
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof json === "string") {
|
||||
return (
|
||||
<span className="text-green-900 font-mono">
|
||||
{/* <span className="select-none opacity-70">{'"'}</span> */}
|
||||
{json}
|
||||
{/* <span className="select-none opacity-70">{'"'}</span> */}
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof json === "number") {
|
||||
return <span className="text-purple-500">{json}</span>;
|
||||
}
|
||||
|
||||
if (typeof json === "boolean") {
|
||||
return (
|
||||
<span
|
||||
className={clsx(
|
||||
json
|
||||
? "text-green-700 bg-green-700/5"
|
||||
: "text-amber-700 bg-amber-500/5",
|
||||
"font-mono",
|
||||
"inline-block px-1 py-0.5 rounded",
|
||||
)}
|
||||
>
|
||||
{json.toString()}
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
if (Array.isArray(json)) {
|
||||
return (
|
||||
<span title={JSON.stringify(json)}>
|
||||
Array <span className="text-gray-500">({json.length})</span>
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof json === "object") {
|
||||
return (
|
||||
<span
|
||||
title={JSON.stringify(json, null, 2)}
|
||||
className="inline-block max-w-64 truncate"
|
||||
>
|
||||
{compact ? (
|
||||
<span>
|
||||
Object{" "}
|
||||
<span className="text-gray-500">
|
||||
({Object.keys(json).length})
|
||||
</span>
|
||||
</span>
|
||||
) : (
|
||||
JSON.stringify(json, null, 2)
|
||||
)}
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
return <span>{String(json)}</span>;
|
||||
}
|
||||
|
||||
export const CoMapPreview = ({
|
||||
coId,
|
||||
node,
|
||||
limit = 6,
|
||||
}: {
|
||||
coId: CoID<RawCoValue>;
|
||||
node: LocalNode;
|
||||
limit?: number;
|
||||
}) => {
|
||||
const { value, snapshot, type, extendedType } = useResolvedCoValue(
|
||||
coId,
|
||||
node,
|
||||
);
|
||||
|
||||
if (!snapshot) {
|
||||
return (
|
||||
<div className="rounded bg-gray-100 animate-pulse whitespace-pre w-24">
|
||||
{" "}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (snapshot === "unavailable" && !value) {
|
||||
return <div className="text-gray-500">Unavailable</div>;
|
||||
}
|
||||
|
||||
if (extendedType === "image" && isBrowserImage(snapshot)) {
|
||||
return (
|
||||
<div>
|
||||
<img
|
||||
src={snapshot.placeholderDataURL}
|
||||
className="size-8 border-2 border-white drop-shadow-md my-2"
|
||||
/>
|
||||
<span className="text-gray-500 text-sm">
|
||||
{snapshot.originalSize[0]} x {snapshot.originalSize[1]}
|
||||
</span>
|
||||
|
||||
{/* <CoMapPreview coId={value[]} node={node} /> */}
|
||||
{/* <ProgressiveImg image={value}>
|
||||
{({ src }) => <img src={src} className={clsx("w-full")} />}
|
||||
</ProgressiveImg> */}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (extendedType === "record") {
|
||||
return (
|
||||
<div>
|
||||
Record{" "}
|
||||
<span className="text-gray-500">
|
||||
({Object.keys(snapshot).length})
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (type === "colist") {
|
||||
return (
|
||||
<div>
|
||||
List{" "}
|
||||
<span className="text-gray-500">
|
||||
({(snapshot as unknown as []).length})
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="text-sm flex flex-col gap-2 items-start">
|
||||
<div className="grid grid-cols-[auto_1fr] gap-2">
|
||||
{Object.entries(snapshot)
|
||||
.slice(0, limit)
|
||||
.map(([key, value]) => (
|
||||
<React.Fragment key={key}>
|
||||
<span className="font-medium">{key}: </span>
|
||||
<span>
|
||||
<ValueRenderer json={value} />
|
||||
</span>
|
||||
</React.Fragment>
|
||||
))}
|
||||
</div>
|
||||
{Object.entries(snapshot).length > limit && (
|
||||
<div className="text-left text-xs text-gray-500 mt-2">
|
||||
{Object.entries(snapshot).length - limit} more
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export function AccountOrGroupPreview({
|
||||
coId,
|
||||
node,
|
||||
showId = false,
|
||||
onClick,
|
||||
}: {
|
||||
coId: CoID<RawCoValue>;
|
||||
node: LocalNode;
|
||||
showId?: boolean;
|
||||
onClick?: (name?: string) => void;
|
||||
}) {
|
||||
const { snapshot, extendedType } = useResolvedCoValue(coId, node);
|
||||
const [name, setName] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (extendedType === "account") {
|
||||
resolveCoValue(
|
||||
(snapshot as unknown as { profile: CoID<RawCoValue> }).profile,
|
||||
node,
|
||||
).then(({ snapshot }) => {
|
||||
if (
|
||||
typeof snapshot === "object" &&
|
||||
"name" in snapshot &&
|
||||
typeof snapshot.name === "string"
|
||||
) {
|
||||
setName(snapshot.name);
|
||||
}
|
||||
});
|
||||
}
|
||||
}, [snapshot, node, extendedType]);
|
||||
|
||||
if (!snapshot) return <span>Loading...</span>;
|
||||
if (extendedType !== "account" && extendedType !== "group") {
|
||||
return <span>CoID is not an account or group</span>;
|
||||
}
|
||||
|
||||
const displayName =
|
||||
extendedType === "account" ? name || "Account" : "Group";
|
||||
const displayText = showId ? `${displayName} (${coId})` : displayName;
|
||||
|
||||
const props = onClick
|
||||
? {
|
||||
onClick: () => onClick(displayName),
|
||||
className: "text-blue-500 cursor-pointer hover:underline",
|
||||
}
|
||||
: {
|
||||
className: "text-gray-500",
|
||||
};
|
||||
|
||||
return <span {...props}>{displayText}</span>;
|
||||
}
|
||||
@@ -1,5 +1,165 @@
|
||||
# jazz-example-pets
|
||||
|
||||
## 0.0.93
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.28
|
||||
- jazz-tools@0.7.28
|
||||
- jazz-browser-media-images@0.7.28
|
||||
|
||||
## 0.0.92
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser-media-images@0.7.27
|
||||
- jazz-react@0.7.27
|
||||
|
||||
## 0.0.91
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.7.26
|
||||
- jazz-tools@0.7.26
|
||||
- jazz-browser-media-images@0.7.26
|
||||
|
||||
## 0.0.90
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.25
|
||||
- jazz-browser-media-images@0.7.25
|
||||
- jazz-react@0.7.25
|
||||
|
||||
## 0.0.89
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.24
|
||||
- jazz-browser-media-images@0.7.24
|
||||
- jazz-react@0.7.24
|
||||
|
||||
## 0.0.88
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.7.23
|
||||
- jazz-tools@0.7.23
|
||||
- jazz-browser-media-images@0.7.23
|
||||
|
||||
## 0.0.87
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser-media-images@0.7.22
|
||||
- jazz-react@0.7.22
|
||||
|
||||
## 0.0.86
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.21
|
||||
- jazz-browser-media-images@0.7.21
|
||||
- jazz-react@0.7.21
|
||||
|
||||
## 0.0.85
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.20
|
||||
- jazz-browser-media-images@0.7.20
|
||||
- jazz-react@0.7.20
|
||||
|
||||
## 0.0.84
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.19
|
||||
- jazz-browser-media-images@0.7.19
|
||||
- jazz-react@0.7.19
|
||||
|
||||
## 0.0.83
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.18
|
||||
- jazz-tools@0.7.18
|
||||
- jazz-browser-media-images@0.7.18
|
||||
|
||||
## 0.0.82
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.17
|
||||
- jazz-tools@0.7.17
|
||||
- jazz-browser-media-images@0.7.17
|
||||
|
||||
## 0.0.81
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.16
|
||||
- jazz-browser-media-images@0.7.16
|
||||
- jazz-react@0.7.16
|
||||
|
||||
## 0.0.80
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.7.15
|
||||
|
||||
## 0.0.79
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.14
|
||||
- jazz-react@0.7.14
|
||||
- jazz-browser-media-images@0.7.14
|
||||
|
||||
## 0.0.78
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.13
|
||||
- jazz-browser-media-images@0.7.13
|
||||
- jazz-react@0.7.13
|
||||
|
||||
## 0.0.77
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.12
|
||||
- jazz-browser-media-images@0.7.12
|
||||
- jazz-react@0.7.12
|
||||
|
||||
## 0.0.76
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.11
|
||||
- jazz-tools@0.7.11
|
||||
- jazz-browser-media-images@0.7.11
|
||||
|
||||
## 0.0.75
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.10
|
||||
- jazz-tools@0.7.10
|
||||
- jazz-browser-media-images@0.7.10
|
||||
|
||||
## 0.0.74
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-example-pets",
|
||||
"private": true,
|
||||
"version": "0.0.74",
|
||||
"version": "0.0.93",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -1,5 +1,147 @@
|
||||
# jazz-example-todo
|
||||
|
||||
## 0.0.92
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.28
|
||||
- jazz-tools@0.7.28
|
||||
|
||||
## 0.0.91
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.27
|
||||
|
||||
## 0.0.90
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.7.26
|
||||
- jazz-tools@0.7.26
|
||||
|
||||
## 0.0.89
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.25
|
||||
- jazz-react@0.7.25
|
||||
|
||||
## 0.0.88
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.24
|
||||
- jazz-react@0.7.24
|
||||
|
||||
## 0.0.87
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.7.23
|
||||
- jazz-tools@0.7.23
|
||||
|
||||
## 0.0.86
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.22
|
||||
|
||||
## 0.0.85
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.21
|
||||
- jazz-react@0.7.21
|
||||
|
||||
## 0.0.84
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.20
|
||||
- jazz-react@0.7.20
|
||||
|
||||
## 0.0.83
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.19
|
||||
- jazz-react@0.7.19
|
||||
|
||||
## 0.0.82
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.18
|
||||
- jazz-tools@0.7.18
|
||||
|
||||
## 0.0.81
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.17
|
||||
- jazz-tools@0.7.17
|
||||
|
||||
## 0.0.80
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.16
|
||||
- jazz-react@0.7.16
|
||||
|
||||
## 0.0.79
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.7.15
|
||||
|
||||
## 0.0.78
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.14
|
||||
- jazz-react@0.7.14
|
||||
|
||||
## 0.0.77
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.13
|
||||
- jazz-react@0.7.13
|
||||
|
||||
## 0.0.76
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.12
|
||||
- jazz-react@0.7.12
|
||||
|
||||
## 0.0.75
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.11
|
||||
- jazz-tools@0.7.11
|
||||
|
||||
## 0.0.74
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-react@0.7.10
|
||||
- jazz-tools@0.7.10
|
||||
|
||||
## 0.0.73
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "jazz-example-todo",
|
||||
"private": true,
|
||||
"version": "0.0.73",
|
||||
"version": "0.0.92",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
@@ -178,7 +178,7 @@ function App() {// old
    if (issue) {// old
        return <IssueComponent issue={issue} />; // old
    } else { // old
        return <button onClick={createIssue}>Create Issue</button>; // old
        return <button onClick={createIssue}>Create Issue</button>;
    } // old
} // old
// old
|
||||
@@ -9,9 +9,11 @@ import localFont from "next/font/local";
|
||||
import { GcmpLogo, JazzLogo } from "@/components/logos";
|
||||
import { SiGithub, SiDiscord, SiTwitter } from "@icons-pack/react-simple-icons";
|
||||
import { Nav, NavLink, Newsletter, NewsletterButton } from "@/components/nav";
|
||||
import { MailIcon } from "lucide-react";
|
||||
import { DocNav } from "@/components/docs/nav";
|
||||
|
||||
import { SpeedInsights } from "@vercel/speed-insights/next";
|
||||
import { Analytics } from "@vercel/analytics/react";
|
||||
|
||||
// If loading a variable font, you don't need to specify the font weight
|
||||
const manrope = Manrope({
|
||||
subsets: ["latin"],
|
||||
@@ -48,6 +50,8 @@ export default function RootLayout({
|
||||
"flex flex-col items-center bg-stone-50 dark:bg-stone-950 overflow-x-hidden",
|
||||
].join(" ")}
|
||||
>
|
||||
<SpeedInsights />
|
||||
<Analytics />
|
||||
<ThemeProvider
|
||||
attribute="class"
|
||||
defaultTheme="system"
|
||||
@@ -108,7 +112,7 @@ export default function RootLayout({
|
||||
<div className="col-span-full md:col-span-1 sm:row-start-4 md:row-start-auto lg:col-span-2 md:row-span-2 md:flex-1 flex flex-row md:flex-col max-sm:mt-4 justify-between max-sm:items-start gap-2 text-sm min-w-[10rem]">
|
||||
<GcmpLogo monochrome className="w-32" />
|
||||
<p className="max-sm:text-right">
|
||||
© 2023
|
||||
© {new Date().getFullYear()}
|
||||
<br />
|
||||
Garden Computing, Inc.
|
||||
</p>
|
||||
@@ -192,12 +196,6 @@ export default function RootLayout({
|
||||
</div>
|
||||
</footer>
|
||||
</ThemeProvider>
|
||||
<script
|
||||
defer
|
||||
data-api="/api/event"
|
||||
data-domain="jazz.tools"
|
||||
src="/js/script.js"
|
||||
></script>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
|
||||
@@ -28,12 +28,6 @@ import Link from "next/link";
|
||||
|
||||
<Prose>
|
||||
|
||||
<a href="https://app.localfirstconf.com/schedule/conference/every-app-secretly-wants-to-be-local-first" className="-mt-8 md:-mt-20 float-right top-[5rem] right-4 border border-stone-700 dark:border-stone-300 rounded flex gap-3 items-center px-4 py-2 mb-4 rotate-2 md:rotate-6 no-underline hover:scale-105 transition-transform">
|
||||
<div className="text-sm font-bold uppercase">See you in Berlin<br/>May 30-31!</div>
|
||||
<LocalFirstConfLogo className="w-24"/>
|
||||
</a>
|
||||
|
||||
|
||||
# Instant sync.
|
||||
|
||||
<Slogan>A new way to build apps with distributed state.</Slogan>
|
||||
@@ -274,11 +268,7 @@ Jazz Mesh is currently free — and it's set up as the default sync & storag
|
||||
|
||||
## Get Started
|
||||
|
||||
- <Link href="/docs" target="_blank">
|
||||
Read the docs
|
||||
</Link>
|
||||
- <Link href="https://discord.gg/utDMjHYg42" target="_blank">
|
||||
Join our Discord
|
||||
</Link>
|
||||
- <Link href="/docs" target="_blank">Read the docs</Link>
|
||||
- <Link href="https://discord.gg/utDMjHYg42" target="_blank">Join our Discord</Link>
|
||||
|
||||
</Prose>
|
||||
File diff suppressed because one or more lines are too long
@@ -14,6 +14,7 @@
|
||||
"*.{ts,tsx}": "eslint --fix",
|
||||
"*.{js,jsx,mdx,json}": "prettier --write"
|
||||
},
|
||||
"packageManager": "pnpm@9.1.4",
|
||||
"dependencies": {
|
||||
"@evilmartians/harmony": "^1.0.0",
|
||||
"@icons-pack/react-simple-icons": "^9.1.0",
|
||||
@@ -21,6 +22,8 @@
|
||||
"@mdx-js/react": "^2.3.0",
|
||||
"@next/mdx": "^13.5.4",
|
||||
"@types/mdx": "^2.0.8",
|
||||
"@vercel/analytics": "^1.3.1",
|
||||
"@vercel/speed-insights": "^1.0.12",
|
||||
"class-variance-authority": "^0.7.0",
|
||||
"clsx": "^2.0.0",
|
||||
"lucide-react": "^0.284.0",
|
||||
|
||||
5244
homepage/pnpm-lock.yaml
generated
File diff suppressed because it is too large
@@ -1,18 +0,0 @@
|
||||
# jazz-example-chat
|
||||
|
||||
## 0.0.47
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.6.7
|
||||
- jazz-react@0.5.5
|
||||
- jazz-react-auth-local@0.4.18
|
||||
|
||||
## 0.0.46
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-react@0.5.0
|
||||
- jazz-react-auth-local@0.4.16
|
||||
@@ -1,14 +0,0 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/png" href="/jazz-logo.png" />
|
||||
<link rel="stylesheet" href="/src/index.css" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Jazz Chat Example</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/app.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,243 +0,0 @@
|
||||
import {
|
||||
WithJazz,
|
||||
useJazz,
|
||||
DemoAuth,
|
||||
useAutoSub,
|
||||
useBinaryStream,
|
||||
} from "jazz-react";
|
||||
import ReactDOM from "react-dom/client";
|
||||
import { HashRoute } from "hash-slash";
|
||||
import { Account, CoID, CoValue, SessionID } from "cojson";
|
||||
import { clsx } from "clsx";
|
||||
import { ImageDefinition } from "cojson/src/media";
|
||||
import { CoJsonTree } from "./cojson-tree";
|
||||
|
||||
ReactDOM.createRoot(document.getElementById("root")!).render(
|
||||
<WithJazz
|
||||
auth={DemoAuth({ appName: "Jazz Chat Example" })}
|
||||
apiKey="api_z9d034j3t34ht034ir"
|
||||
>
|
||||
<App />
|
||||
</WithJazz>
|
||||
);
|
||||
|
||||
function App() {
|
||||
return (
|
||||
<div className="flex flex-col items-center justify-between w-screen h-screen p-2 ">
|
||||
<button
|
||||
onClick={useJazz().logOut}
|
||||
className="rounded mb-5 px-2 py-1 bg-stone-200 dark:bg-stone-800 dark:text-white self-end"
|
||||
>
|
||||
Log Out
|
||||
</button>
|
||||
{HashRoute(
|
||||
{
|
||||
"/": <Home />,
|
||||
"/:id": (id) => <Inspect coValueId={id as CoID<CoValue>} />,
|
||||
},
|
||||
{ reportToParentFrame: true }
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function Home() {
|
||||
return (
|
||||
<form
|
||||
className="mb-auto"
|
||||
onSubmit={(event) => {
|
||||
const coValueId = (event.target as any).coValueId
|
||||
.value as CoID<CoValue>;
|
||||
location.hash = "/" + coValueId;
|
||||
event.preventDefault();
|
||||
}}
|
||||
>
|
||||
<input name="coValueId" className="border" />
|
||||
<button>Inspect</button>
|
||||
</form>
|
||||
);
|
||||
}
|
||||
|
||||
function Tag({ children, href }: { children: React.ReactNode; href?: string }) {
|
||||
if (href) {
|
||||
return (
|
||||
<a
|
||||
href={href}
|
||||
className="border text-xs px-2 py-0.5 rounded hover:underline"
|
||||
>
|
||||
{children}
|
||||
</a>
|
||||
);
|
||||
}
|
||||
return <span className="border text-xs px-2 py-0.5 rounded">{children}</span>;
|
||||
}
|
||||
|
||||
function ImageCoValue({ value }: { value: ImageDefinition["_shape"] }) {
|
||||
const keys = Object.keys(value);
|
||||
const keyIncludingRes = keys.find((key) => key.includes("x"));
|
||||
const idToResolve = keyIncludingRes
|
||||
? value[keyIncludingRes as `${number}x${number}`]
|
||||
: null;
|
||||
|
||||
if (!idToResolve) return <div>Can't find image</div>;
|
||||
|
||||
const image = useBinaryStream(idToResolve);
|
||||
|
||||
return (
|
||||
<img src={image?.blobURL || value.placeholderDataURL} alt="placeholder" />
|
||||
);
|
||||
}
|
||||
|
||||
function Inspect({ coValueId }: { coValueId: CoID<CoValue> }) {
|
||||
const coValue = useAutoSub(coValueId);
|
||||
|
||||
const values = coValue?.meta.coValue.toJSON() || {};
|
||||
const isImage = "placeholderDataURL" in values;
|
||||
const isGroup = coValue?.meta.group.id === coValueId;
|
||||
|
||||
const entires = Object.entries(values as any) as [string, string][];
|
||||
const onlyCoValues = entires.filter(([key]) => key.startsWith("co_"));
|
||||
|
||||
let title = "";
|
||||
if (isImage) {
|
||||
title = "Image";
|
||||
} else if (isGroup) {
|
||||
title = "Group";
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="mb-auto">
|
||||
<h1 className="text-xl font-bold mb-2">
|
||||
Inspecting {title}{" "}
|
||||
<span className="text-gray-500 text-sm">{coValueId}</span>
|
||||
</h1>
|
||||
|
||||
{isGroup ? (
|
||||
<p>
|
||||
{onlyCoValues.length > 0 ? <h3>Permissions</h3> : ""}
|
||||
<div className="flex gap-2 flex-col">
|
||||
{onlyCoValues?.map(([key, value]) => (
|
||||
<div className="flex gap-1 items-center">
|
||||
<span className="bg-gray-200 text-xs px-2 py-0.5 rounded">
|
||||
{value}
|
||||
</span>
|
||||
<AccountInfo accountID={key as CoID<Account>} />
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</p>
|
||||
) : (
|
||||
<span className="">
|
||||
Group{" "}
|
||||
<Tag href={`#/${coValue?.meta.group.id}`}>
|
||||
{coValue?.meta.group.id}
|
||||
</Tag>
|
||||
</span>
|
||||
)}
|
||||
{isImage ? (
|
||||
<div className="my-2">
|
||||
<ImageCoValue value={values as any} />
|
||||
</div>
|
||||
) : null}
|
||||
<pre className="max-w-[80vw] overflow-scroll text-sm mt-4">
|
||||
<CoJsonTree coValueId={coValueId} />
|
||||
</pre>
|
||||
<h2 className="text-lg font-semibold mt-10 mb-4">Sessions</h2>
|
||||
{coValue && <Sessions coValue={coValue.meta.coValue} />}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function Sessions({ coValue }: { coValue: CoValue }) {
|
||||
const validTx = coValue.core.getValidSortedTransactions();
|
||||
return (
|
||||
<div className="max-w-[80vw] border rounded">
|
||||
{[...coValue.core.sessionLogs.entries()].map(([sessionID, session]) => (
|
||||
<div
|
||||
key={sessionID}
|
||||
className="mv-10 flex gap-2 border-b p-5 flex-wrap flex-col"
|
||||
>
|
||||
<div className="flex gap-2 flex-row">
|
||||
<SessionInfo
|
||||
sessionID={sessionID}
|
||||
transactionCount={session.transactions.length}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex gap-1 flex-wrap max-h-64 overflow-y-auto p-1 bg-gray-50 rounded">
|
||||
{session.transactions.map((tx, txIdx) => {
|
||||
const correspondingValidTx = validTx.find(
|
||||
(validTx) =>
|
||||
validTx.txID.sessionID === sessionID &&
|
||||
validTx.txID.txIndex == txIdx
|
||||
);
|
||||
return (
|
||||
<div
|
||||
key={txIdx}
|
||||
className={clsx(
|
||||
"text-xs flex-1 p-2 border rounded min-w-36 max-w-40 overflow-scroll bg-white",
|
||||
!correspondingValidTx && "bg-red-50 border-red-100"
|
||||
)}
|
||||
>
|
||||
<div>{new Date(tx.madeAt).toLocaleString()}</div>
|
||||
<div>{tx.privacy}</div>
|
||||
<pre>
|
||||
{correspondingValidTx
|
||||
? JSON.stringify(
|
||||
correspondingValidTx.changes,
|
||||
undefined,
|
||||
2
|
||||
)
|
||||
: "invalid/undecryptable"}
|
||||
</pre>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
<div className="text-xs">
|
||||
{session.lastHash} / {session.lastSignature}{" "}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function SessionInfo({
|
||||
sessionID,
|
||||
transactionCount,
|
||||
}: {
|
||||
sessionID: SessionID;
|
||||
transactionCount: number;
|
||||
}) {
|
||||
let Prefix = sessionID.startsWith("co_") ? (
|
||||
<AccountInfo accountID={sessionID.split("_session_")[0] as CoID<Account>} />
|
||||
) : (
|
||||
<pre className="text-xs">{sessionID.split("_session_")[0]}</pre>
|
||||
);
|
||||
|
||||
return (
|
||||
<div>
|
||||
{Prefix}
|
||||
<div>
|
||||
<span className="text-xs">
|
||||
Session {sessionID.split("_session_")[1]}
|
||||
</span>
|
||||
<span className="text-xs text-gray-600 font-medium">
|
||||
{" "}
|
||||
- {transactionCount} txs
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function AccountInfo({ accountID }: { accountID: CoID<Account> }) {
|
||||
const account = useAutoSub(accountID);
|
||||
return (
|
||||
<div className="flex items-center gap-2">
|
||||
<h1>{account?.profile?.name}</h1>
|
||||
|
||||
<Tag href={`#/${accountID}`}>{account?.id}</Tag>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,151 +0,0 @@
|
||||
import clsx from "clsx";
|
||||
import { CoID, CoValue } from "cojson";
|
||||
import { useAutoSub } from "jazz-react";
|
||||
import { useState } from "react";
|
||||
import { LinkIcon } from "./link-icon";
|
||||
|
||||
export function CoJsonTree({ coValueId }: { coValueId: CoID<CoValue> }) {
|
||||
const coValue = useAutoSub(coValueId);
|
||||
|
||||
const values = coValue?.meta.coValue.toJSON() || {};
|
||||
|
||||
return <RenderCoValueJSON json={values} />;
|
||||
}
|
||||
|
||||
function RenderObject({ json }: { json: Record<string, any> }) {
|
||||
const [limit, setLimit] = useState(10);
|
||||
const hasMore = Object.keys(json).length > limit;
|
||||
|
||||
const entries = Object.entries(json).slice(0, limit);
|
||||
return (
|
||||
<div className="flex gap-x-1 flex-col font-mono text-xs overflow-auto">
|
||||
{entries.map(([key, value]) => {
|
||||
return <RenderObjectValue property={key} value={value} />;
|
||||
})}
|
||||
{hasMore ? (
|
||||
<div
|
||||
className="text-gray-500 cursor-pointer"
|
||||
onClick={() => setLimit((l) => l + 10)}
|
||||
>
|
||||
... {Object.keys(json).length - limit} more
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function RenderObjectValue({
|
||||
property,
|
||||
value,
|
||||
}: {
|
||||
property: string;
|
||||
value: any;
|
||||
}) {
|
||||
const [shouldLoad, setShouldLoad] = useState(false);
|
||||
|
||||
const isCoValue =
|
||||
typeof value === "string" ? value?.startsWith("co_") : false;
|
||||
|
||||
return (
|
||||
<div className={clsx(`flex group`)}>
|
||||
<span className="text-gray-500 flex">
|
||||
<RenderCoValueJSON json={property} />:{" "}
|
||||
</span>
|
||||
|
||||
{isCoValue ? (
|
||||
<div className={clsx(shouldLoad && "pb-2")}>
|
||||
<div className="flex items-center ">
|
||||
<div onClick={() => setShouldLoad((s) => !s)}>
|
||||
<div className="w-8 text-center text-gray-700 font-mono px-1 text-xs rounded hover:bg-gray-300 cursor-pointer">
|
||||
{shouldLoad ? `-` : `...`}
|
||||
</div>
|
||||
</div>
|
||||
<a href={`#/${value}`} className="ml-2 group-hover:block hidden">
|
||||
<LinkIcon />
|
||||
</a>
|
||||
</div>
|
||||
<span>{shouldLoad ? <CoJsonTree coValueId={value} /> : null}</span>
|
||||
</div>
|
||||
) : (
|
||||
<div className="">
|
||||
<RenderCoValueJSON json={value} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function RenderCoValueArray({ json }: { json: any[] }) {
|
||||
const [limit, setLimit] = useState(10);
|
||||
const hasMore = json.length > limit;
|
||||
|
||||
const entries = json.slice(0, limit);
|
||||
return (
|
||||
<div className="flex gap-x-1 flex-col font-mono text-xs overflow-auto">
|
||||
{entries.map((value, idx) => {
|
||||
return (
|
||||
<div key={idx} className="flex gap-x-1">
|
||||
<RenderCoValueJSON json={value} />
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
{hasMore ? (
|
||||
<div
|
||||
className="text-gray-500 cursor-pointer"
|
||||
onClick={() => setLimit((l) => l + 10)}
|
||||
>
|
||||
... {json.length - limit} more
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function RenderCoValueJSON({
|
||||
json,
|
||||
}: {
|
||||
json:
|
||||
| Record<string, any>
|
||||
| any[]
|
||||
| string
|
||||
| null
|
||||
| number
|
||||
| boolean
|
||||
| undefined;
|
||||
}) {
|
||||
if (typeof json === "undefined") {
|
||||
return <>"undefined"</>;
|
||||
} else if (Array.isArray(json)) {
|
||||
return (
|
||||
<div className="">
|
||||
<span className="text-gray-500">[</span>
|
||||
<div className="ml-2">
|
||||
<RenderCoValueArray json={json} />
|
||||
</div>
|
||||
<span className="text-gray-500">]</span>
|
||||
</div>
|
||||
);
|
||||
} else if (
|
||||
typeof json === "object" &&
|
||||
json &&
|
||||
Object.getPrototypeOf(json) === Object.prototype
|
||||
) {
|
||||
return <RenderObject json={json} />;
|
||||
} else if (typeof json === "string") {
|
||||
if (json?.startsWith("co_")) {
|
||||
return (
|
||||
<>
|
||||
<a className="underline" href={`#/${json}`}>
|
||||
{'"'}
|
||||
{json}
|
||||
{'"'}
|
||||
</a>
|
||||
</>
|
||||
);
|
||||
} else {
|
||||
return <div className="truncate max-w-64 ml-1">{json}</div>;
|
||||
}
|
||||
} else {
|
||||
return <div className="truncate max-w-64">{JSON.stringify(json)}</div>;
|
||||
}
|
||||
}
|
||||
@@ -1,78 +0,0 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@layer base {
|
||||
:root {
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 20 14.3% 4.1%;
|
||||
|
||||
--card: 0 0% 100%;
|
||||
--card-foreground: 20 14.3% 4.1%;
|
||||
|
||||
--popover: 0 0% 100%;
|
||||
--popover-foreground: 20 14.3% 4.1%;
|
||||
|
||||
--primary: 24 9.8% 10%;
|
||||
--primary-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--secondary: 60 4.8% 95.9%;
|
||||
--secondary-foreground: 24 9.8% 10%;
|
||||
|
||||
--muted: 60 4.8% 95.9%;
|
||||
--muted-foreground: 25 5.3% 44.7%;
|
||||
|
||||
--accent: 60 4.8% 95.9%;
|
||||
--accent-foreground: 24 9.8% 10%;
|
||||
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--border: 20 5.9% 90%;
|
||||
--input: 20 5.9% 90%;
|
||||
--ring: 20 14.3% 4.1%;
|
||||
|
||||
--radius: 0.5rem;
|
||||
}
|
||||
|
||||
.dark {
|
||||
--background: 20 14.3% 4.1%;
|
||||
--foreground: 60 9.1% 97.8%;
|
||||
|
||||
--card: 20 14.3% 4.1%;
|
||||
--card-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--popover: 20 14.3% 4.1%;
|
||||
--popover-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--primary: 60 9.1% 97.8%;
|
||||
--primary-foreground: 24 9.8% 10%;
|
||||
|
||||
--secondary: 12 6.5% 15.1%;
|
||||
--secondary-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--muted: 12 6.5% 15.1%;
|
||||
--muted-foreground: 24 5.4% 63.9%;
|
||||
|
||||
--accent: 12 6.5% 15.1%;
|
||||
--accent-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 60 9.1% 97.8%;
|
||||
|
||||
--border: 12 6.5% 15.1%;
|
||||
--input: 12 6.5% 15.1%;
|
||||
--ring: 24 5.7% 82.9%;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border;
|
||||
}
|
||||
body {
|
||||
@apply bg-background text-foreground;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
export function LinkIcon() {
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
strokeWidth={1.5}
|
||||
stroke="currentColor"
|
||||
className="w-3 h-3"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
d="M13.19 8.688a4.5 4.5 0 0 1 1.242 7.244l-4.5 4.5a4.5 4.5 0 0 1-6.364-6.364l1.757-1.757m13.35-.622 1.757-1.757a4.5 4.5 0 0 0-6.364-6.364l-4.5 4.5a4.5 4.5 0 0 0 1.242 7.244"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
@@ -6,10 +6,9 @@
|
||||
"packages/*",
|
||||
"examples/*"
|
||||
],
|
||||
"packageManager": "pnpm@9.1.4",
|
||||
"devDependencies": {
|
||||
"@changesets/cli": "^2.27.3",
|
||||
"husky": "^9.0.11",
|
||||
"lint-staged": "^15.2.2",
|
||||
"prettier": "^3.1.1",
|
||||
"ts-node": "^10.9.1",
|
||||
"turbo": "^1.11.2",
|
||||
@@ -24,8 +23,7 @@
|
||||
"format": "pnpm run -r format && cd homepage/homepage && pnpm run format",
|
||||
"changeset": "changeset",
|
||||
"changeset-version": "changeset version",
|
||||
"release": "pnpm changeset publish && git push --follow-tags",
|
||||
"prepare": "husky"
|
||||
"release": "pnpm changeset publish && git push --follow-tags"
|
||||
},
|
||||
"lint-staged": {},
|
||||
"version": "0.0.0"
|
||||
|
||||
@@ -1,5 +1,62 @@
|
||||
# cojson-storage-indexeddb
|
||||
|
||||
## 0.7.28
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.28
|
||||
|
||||
## 0.7.26
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Remove Effect from jazz/cojson internals
|
||||
- Updated dependencies
|
||||
- cojson@0.7.26
|
||||
|
||||
## 0.7.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.23
|
||||
|
||||
## 0.7.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.18
|
||||
|
||||
## 0.7.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.17
|
||||
|
||||
## 0.7.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.14
|
||||
|
||||
## 0.7.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.11
|
||||
|
||||
## 0.7.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.10
|
||||
|
||||
## 0.7.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
{
|
||||
"name": "cojson-storage-indexeddb",
|
||||
"version": "0.7.9",
|
||||
"version": "0.7.28",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"types": "src/index.ts",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cojson": "workspace:*",
|
||||
"typescript": "^5.1.6",
|
||||
"isomorphic-streams": "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
|
||||
"typescript": "^5.1.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitest/browser": "^0.34.1",
|
||||
|
||||
@@ -6,13 +6,9 @@ import {
|
||||
CojsonInternalTypes,
|
||||
MAX_RECOMMENDED_TX_SIZE,
|
||||
AccountID,
|
||||
IncomingSyncStream,
|
||||
OutgoingSyncQueue,
|
||||
} from "cojson";
|
||||
import {
|
||||
ReadableStream,
|
||||
WritableStream,
|
||||
ReadableStreamDefaultReader,
|
||||
WritableStreamDefaultWriter,
|
||||
} from "isomorphic-streams";
|
||||
import { SyncPromise } from "./syncPromises.js";
|
||||
|
||||
type CoValueRow = {
|
||||
@@ -46,39 +42,43 @@ type SignatureAfterRow = {
|
||||
|
||||
export class IDBStorage {
|
||||
db: IDBDatabase;
|
||||
fromLocalNode!: ReadableStreamDefaultReader<SyncMessage>;
|
||||
toLocalNode: WritableStreamDefaultWriter<SyncMessage>;
|
||||
toLocalNode: OutgoingSyncQueue;
|
||||
|
||||
constructor(
|
||||
db: IDBDatabase,
|
||||
fromLocalNode: ReadableStream<SyncMessage>,
|
||||
toLocalNode: WritableStream<SyncMessage>,
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
this.db = db;
|
||||
this.fromLocalNode = fromLocalNode.getReader();
|
||||
this.toLocalNode = toLocalNode.getWriter();
|
||||
this.toLocalNode = toLocalNode;
|
||||
|
||||
void (async () => {
|
||||
let done = false;
|
||||
while (!done) {
|
||||
const result = await this.fromLocalNode.read();
|
||||
done = result.done;
|
||||
|
||||
if (result.value) {
|
||||
// console.log(
|
||||
// "IDB: handling msg",
|
||||
// result.value.id,
|
||||
// result.value.action
|
||||
// );
|
||||
await this.handleSyncMessage(result.value);
|
||||
// console.log(
|
||||
// "IDB: handled msg",
|
||||
// result.value.id,
|
||||
// result.value.action
|
||||
// );
|
||||
const processMessages = async () => {
|
||||
for await (const msg of fromLocalNode) {
|
||||
try {
|
||||
if (msg === "Disconnected" || msg === "PingTimeout") {
|
||||
throw new Error("Unexpected Disconnected message");
|
||||
}
|
||||
await this.handleSyncMessage(msg);
|
||||
} catch (e) {
|
||||
console.error(
|
||||
new Error(
|
||||
`Error reading from localNode, handling msg\n\n${JSON.stringify(
|
||||
msg,
|
||||
(k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
)}`,
|
||||
{ cause: e },
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
})();
|
||||
};
|
||||
|
||||
processMessages().catch((e) =>
|
||||
console.error("Error in processMessages in IndexedDB", e),
|
||||
);
|
||||
}
|
||||
|
||||
static async asPeer(
|
||||
@@ -104,8 +104,8 @@ export class IDBStorage {
|
||||
}
|
||||
|
||||
static async open(
|
||||
fromLocalNode: ReadableStream<SyncMessage>,
|
||||
toLocalNode: WritableStream<SyncMessage>,
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
const dbPromise = new Promise<IDBDatabase>((resolve, reject) => {
|
||||
const request = indexedDB.open("jazz-storage", 4);
|
||||
@@ -150,23 +150,6 @@ export class IDBStorage {
|
||||
keyPath: ["ses", "idx"],
|
||||
});
|
||||
}
|
||||
// if (ev.oldVersion !== 0 && ev.oldVersion <= 3) {
|
||||
// // fix embarrassing off-by-one error for transaction indices
|
||||
// console.log("Migration: fixing off-by-one error");
|
||||
// const transaction = (
|
||||
// ev.target as unknown as { transaction: IDBTransaction }
|
||||
// ).transaction;
|
||||
|
||||
// const txsStore = transaction.objectStore("transactions");
|
||||
// const txs = await promised(txsStore.getAll());
|
||||
|
||||
// for (const tx of txs) {
|
||||
// await promised(txsStore.delete([tx.ses, tx.idx]));
|
||||
// tx.idx -= 1;
|
||||
// await promised(txsStore.add(tx));
|
||||
// }
|
||||
// console.log("Migration: fixing off-by-one error - done");
|
||||
// }
|
||||
};
|
||||
});
|
||||
|
||||
@@ -409,12 +392,19 @@ export class IDBStorage {
|
||||
),
|
||||
).then(() => {
|
||||
// we're done with IndexedDB stuff here so can use native Promises again
|
||||
setTimeout(async () => {
|
||||
await this.toLocalNode.write({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
asDependencyOf,
|
||||
});
|
||||
setTimeout(() => {
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
asDependencyOf,
|
||||
})
|
||||
.catch((e) =>
|
||||
console.error(
|
||||
"Error sending known state",
|
||||
e,
|
||||
),
|
||||
);
|
||||
|
||||
const nonEmptyNewContentPieces =
|
||||
newContentPieces.filter(
|
||||
@@ -426,12 +416,16 @@ export class IDBStorage {
|
||||
// console.log(theirKnown.id, nonEmptyNewContentPieces);
|
||||
|
||||
for (const piece of nonEmptyNewContentPieces) {
|
||||
await this.toLocalNode.write(piece);
|
||||
await new Promise((resolve) =>
|
||||
setTimeout(resolve, 0),
|
||||
);
|
||||
this.toLocalNode
|
||||
.push(piece)
|
||||
.catch((e) =>
|
||||
console.error(
|
||||
"Error sending new content piece",
|
||||
e,
|
||||
),
|
||||
);
|
||||
}
|
||||
}, 0);
|
||||
});
|
||||
|
||||
return Promise.resolve();
|
||||
});
|
||||
@@ -456,14 +450,18 @@ export class IDBStorage {
|
||||
const header = msg.header;
|
||||
if (!header) {
|
||||
console.error("Expected to be sent header first");
|
||||
void this.toLocalNode.write({
|
||||
action: "known",
|
||||
id: msg.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
isCorrection: true,
|
||||
});
|
||||
throw new Error("Expected to be sent header first");
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
id: msg.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
isCorrection: true,
|
||||
})
|
||||
.catch((e) =>
|
||||
console.error("Error sending known state", e),
|
||||
);
|
||||
return SyncPromise.resolve();
|
||||
}
|
||||
|
||||
return this.makeRequest<IDBValidKey>(({ coValues }) =>
|
||||
@@ -524,11 +522,18 @@ export class IDBStorage {
|
||||
),
|
||||
).then(() => {
|
||||
if (invalidAssumptions) {
|
||||
void this.toLocalNode.write({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
isCorrection: invalidAssumptions,
|
||||
});
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
isCorrection: invalidAssumptions,
|
||||
})
|
||||
.catch((e) =>
|
||||
console.error(
|
||||
"Error sending known state",
|
||||
e,
|
||||
),
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,62 @@
|
||||
# cojson-storage-sqlite
|
||||
|
||||
## 0.7.28
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.28
|
||||
|
||||
## 0.7.26
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Remove Effect from jazz/cojson internals
|
||||
- Updated dependencies
|
||||
- cojson@0.7.26
|
||||
|
||||
## 0.7.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.23
|
||||
|
||||
## 0.7.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.18
|
||||
|
||||
## 0.7.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.17
|
||||
|
||||
## 0.7.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.14
|
||||
|
||||
## 0.7.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.11
|
||||
|
||||
## 0.7.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.10
|
||||
|
||||
## 0.7.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
{
|
||||
"name": "cojson-storage-sqlite",
|
||||
"type": "module",
|
||||
"version": "0.7.9",
|
||||
"version": "0.7.28",
|
||||
"main": "dist/index.js",
|
||||
"types": "src/index.ts",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^8.5.2",
|
||||
"cojson": "workspace:*",
|
||||
"typescript": "^5.1.6",
|
||||
"isomorphic-streams": "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
|
||||
"typescript": "^5.1.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/better-sqlite3": "^7.6.4"
|
||||
|
||||
@@ -6,13 +6,9 @@ import {
|
||||
SessionID,
|
||||
MAX_RECOMMENDED_TX_SIZE,
|
||||
AccountID,
|
||||
IncomingSyncStream,
|
||||
OutgoingSyncQueue,
|
||||
} from "cojson";
|
||||
import {
|
||||
ReadableStream,
|
||||
WritableStream,
|
||||
ReadableStreamDefaultReader,
|
||||
WritableStreamDefaultWriter,
|
||||
} from "isomorphic-streams";
|
||||
|
||||
import Database, { Database as DatabaseT } from "better-sqlite3";
|
||||
|
||||
@@ -46,30 +42,44 @@ type SignatureAfterRow = {
|
||||
};
|
||||
|
||||
export class SQLiteStorage {
|
||||
fromLocalNode!: ReadableStreamDefaultReader<SyncMessage>;
|
||||
toLocalNode: WritableStreamDefaultWriter<SyncMessage>;
|
||||
toLocalNode: OutgoingSyncQueue;
|
||||
db: DatabaseT;
|
||||
|
||||
constructor(
|
||||
db: DatabaseT,
|
||||
fromLocalNode: ReadableStream<SyncMessage>,
|
||||
toLocalNode: WritableStream<SyncMessage>,
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
this.db = db;
|
||||
this.fromLocalNode = fromLocalNode.getReader();
|
||||
this.toLocalNode = toLocalNode.getWriter();
|
||||
this.toLocalNode = toLocalNode;
|
||||
|
||||
void (async () => {
|
||||
let done = false;
|
||||
while (!done) {
|
||||
const result = await this.fromLocalNode.read();
|
||||
done = result.done;
|
||||
|
||||
if (result.value) {
|
||||
await this.handleSyncMessage(result.value);
|
||||
const processMessages = async () => {
|
||||
for await (const msg of fromLocalNode) {
|
||||
try {
|
||||
if (msg === "Disconnected" || msg === "PingTimeout") {
|
||||
throw new Error("Unexpected Disconnected message");
|
||||
}
|
||||
await this.handleSyncMessage(msg);
|
||||
} catch (e) {
|
||||
console.error(
|
||||
new Error(
|
||||
`Error reading from localNode, handling msg\n\n${JSON.stringify(
|
||||
msg,
|
||||
(k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
)}`,
|
||||
{ cause: e },
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
})();
|
||||
};
|
||||
|
||||
processMessages().catch((e) =>
|
||||
console.error("Error in processMessages in sqlite", e),
|
||||
);
|
||||
}
|
||||
|
||||
static async asPeer({
|
||||
@@ -98,8 +108,8 @@ export class SQLiteStorage {
|
||||
|
||||
static async open(
|
||||
filename: string,
|
||||
fromLocalNode: ReadableStream<SyncMessage>,
|
||||
toLocalNode: WritableStream<SyncMessage>,
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
const db = Database(filename);
|
||||
db.pragma("journal_mode = WAL");
|
||||
@@ -431,11 +441,13 @@ export class SQLiteStorage {
|
||||
);
|
||||
}
|
||||
|
||||
await this.toLocalNode.write({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
asDependencyOf,
|
||||
});
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
asDependencyOf,
|
||||
})
|
||||
.catch((e) => console.error("Error while pushing known", e));
|
||||
|
||||
const nonEmptyNewContentPieces = newContentPieces.filter(
|
||||
(piece) => piece.header || Object.keys(piece.new).length > 0,
|
||||
@@ -444,7 +456,11 @@ export class SQLiteStorage {
|
||||
// console.log(theirKnown.id, nonEmptyNewContentPieces);
|
||||
|
||||
for (const piece of nonEmptyNewContentPieces) {
|
||||
await this.toLocalNode.write(piece);
|
||||
this.toLocalNode
|
||||
.push(piece)
|
||||
.catch((e) =>
|
||||
console.error("Error while pushing content piece", e),
|
||||
);
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
}
|
||||
}
|
||||
@@ -466,13 +482,17 @@ export class SQLiteStorage {
|
||||
const header = msg.header;
|
||||
if (!header) {
|
||||
console.error("Expected to be sent header first");
|
||||
await this.toLocalNode.write({
|
||||
action: "known",
|
||||
id: msg.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
isCorrection: true,
|
||||
});
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
id: msg.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
isCorrection: true,
|
||||
})
|
||||
.catch((e) =>
|
||||
console.error("Error while pushing known", e),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -604,11 +624,13 @@ export class SQLiteStorage {
|
||||
})();
|
||||
|
||||
if (invalidAssumptions) {
|
||||
await this.toLocalNode.write({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
isCorrection: invalidAssumptions,
|
||||
});
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
isCorrection: invalidAssumptions,
|
||||
})
|
||||
.catch((e) => console.error("Error while pushing known", e));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
import { WebSocket } from "ws";
|
||||
import { WritableStream, ReadableStream } from "isomorphic-streams";
|
||||
|
||||
export function websocketReadableStream<T>(ws: WebSocket) {
|
||||
ws.binaryType = "arraybuffer";
|
||||
|
||||
return new ReadableStream<T>({
|
||||
start(controller) {
|
||||
ws.addEventListener("message", (event) => {
|
||||
if (typeof event.data !== "string")
|
||||
return console.warn(
|
||||
"Got non-string message from client",
|
||||
event.data,
|
||||
);
|
||||
const msg = JSON.parse(event.data);
|
||||
if (msg.type === "ping") {
|
||||
// console.debug(
|
||||
// "Got ping from",
|
||||
// msg.dc,
|
||||
// "latency",
|
||||
// Date.now() - msg.time,
|
||||
// "ms"
|
||||
// );
|
||||
return;
|
||||
}
|
||||
controller.enqueue(msg);
|
||||
});
|
||||
ws.addEventListener("close", () => {
|
||||
try {
|
||||
controller.close();
|
||||
} catch (ignore) {
|
||||
// will throw if already closed, with no way to check before-hand
|
||||
}
|
||||
});
|
||||
ws.addEventListener("error", () =>
|
||||
controller.error(new Error("The WebSocket errored!")),
|
||||
);
|
||||
},
|
||||
|
||||
cancel() {
|
||||
ws.close();
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export function websocketWritableStream<T>(ws: WebSocket) {
|
||||
return new WritableStream<T>({
|
||||
start(controller) {
|
||||
ws.addEventListener("close", () =>
|
||||
controller.error(
|
||||
new Error("The WebSocket closed unexpectedly!"),
|
||||
),
|
||||
);
|
||||
ws.addEventListener("error", () =>
|
||||
controller.error(new Error("The WebSocket errored!")),
|
||||
);
|
||||
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
|
||||
return new Promise((resolve) =>
|
||||
ws.addEventListener("open", resolve, { once: true }),
|
||||
);
|
||||
},
|
||||
|
||||
write(chunk) {
|
||||
ws.send(JSON.stringify(chunk));
|
||||
// Return immediately, since the web socket gives us no easy way to tell
|
||||
// when the write completes.
|
||||
},
|
||||
|
||||
close() {
|
||||
return closeWS(1000);
|
||||
},
|
||||
|
||||
abort(reason) {
|
||||
return closeWS(4000, reason && reason.message);
|
||||
},
|
||||
});
|
||||
|
||||
function closeWS(code: number, reasonString?: string) {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
ws.onclose = (e) => {
|
||||
if (e.wasClean) {
|
||||
resolve();
|
||||
} else {
|
||||
reject(new Error("The connection was not closed cleanly"));
|
||||
}
|
||||
};
|
||||
ws.close(code, reasonString);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,74 @@
|
||||
# cojson-transport-nodejs-ws
|
||||
|
||||
## 0.7.28
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.28
|
||||
|
||||
## 0.7.27
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Option to not expect pings
|
||||
|
||||
## 0.7.26
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Remove Effect from jazz/cojson internals
|
||||
- Updated dependencies
|
||||
- cojson@0.7.26
|
||||
|
||||
## 0.7.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.23
|
||||
|
||||
## 0.7.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Increase disconnect timeout for now
|
||||
|
||||
## 0.7.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.18
|
||||
|
||||
## 0.7.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.17
|
||||
|
||||
## 0.7.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.14
|
||||
|
||||
## 0.7.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.11
|
||||
|
||||
## 0.7.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.10
|
||||
|
||||
## 0.7.9
|
||||
|
||||
### Patch Changes
|
||||
@@ -1,15 +1,13 @@
|
||||
{
|
||||
"name": "cojson-transport-nodejs-ws",
|
||||
"name": "cojson-transport-ws",
|
||||
"type": "module",
|
||||
"version": "0.7.9",
|
||||
"version": "0.7.28",
|
||||
"main": "dist/index.js",
|
||||
"types": "src/index.ts",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cojson": "workspace:*",
|
||||
"typescript": "^5.1.6",
|
||||
"ws": "^8.14.2",
|
||||
"isomorphic-streams": "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
|
||||
"typescript": "^5.1.6"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "tsc --watch --sourceMap --outDir dist",
|
||||
118
packages/cojson-transport-ws/src/index.ts
Normal file
@@ -0,0 +1,118 @@
import {
    DisconnectedError,
    Peer,
    PingTimeoutError,
    SyncMessage,
    cojsonInternals,
} from "cojson";

interface WebsocketEvents {
    close: { code: number; reason: string };
    message: { data: unknown };
    open: void;
}
interface PingMsg {
    time: number;
    dc: string;
}

interface AnyWebSocket {
    addEventListener<K extends keyof WebsocketEvents>(
        type: K,
        listener: (event: WebsocketEvents[K]) => void,
        options?: { once: boolean },
    ): void;
    removeEventListener<K extends keyof WebsocketEvents>(
        type: K,
        listener: (event: WebsocketEvents[K]) => void,
    ): void;
    close(): void;
    send(data: string): void;
    readyState: number;
}

const g: typeof globalThis & {
    jazzPings?: {
        received: number;
        sent: number;
        dc: string;
    }[];
} = globalThis;

export function createWebSocketPeer({
    id,
    websocket,
    role,
    expectPings = true,
}: {
    id: string;
    websocket: AnyWebSocket;
    role: Peer["role"];
    expectPings?: boolean;
}): Peer {
    const incoming = new cojsonInternals.Channel<
        SyncMessage | DisconnectedError | PingTimeoutError
    >();

    websocket.addEventListener("close", function handleClose() {
        incoming
            .push("Disconnected")
            .catch((e) =>
                console.error("Error while pushing disconnect msg", e),
            );
    });

    let pingTimeout: ReturnType<typeof setTimeout> | null = null;

    websocket.addEventListener("message", function handleIncomingMsg(event) {
        const msg = JSON.parse(event.data as string);
        pingTimeout && clearTimeout(pingTimeout);
        if (msg?.type === "ping") {
            const ping = msg as PingMsg;
            g.jazzPings ||= [];
            g.jazzPings.push({
                received: Date.now(),
                sent: ping.time,
                dc: ping.dc,
            });
        } else {
            incoming
                .push(msg)
                .catch((e) =>
                    console.error("Error while pushing incoming msg", e),
                );
        }
        if (expectPings) {
            pingTimeout = setTimeout(() => {
                incoming
                    .push("PingTimeout")
                    .catch((e) =>
                        console.error("Error while pushing ping timeout", e),
                    );
            }, 10_000);
        }
    });

    const websocketOpen = new Promise<void>((resolve) => {
        websocket.addEventListener("open", resolve, { once: true });
    });

    return {
        id,
        incoming,
        outgoing: {
            async push(msg) {
                await websocketOpen;
                if (websocket.readyState === 1) {
                    websocket.send(JSON.stringify(msg));
                }
            },
            close() {
                if (websocket.readyState === 1) {
                    websocket.close();
                }
            },
        },
        role,
    };
}
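The snippet below is a rough usage sketch, not taken from the diff: it wires the new WebSocket transport into an existing LocalNode, assuming the package is imported as "cojson-transport-ws", that `node` is a LocalNode you already created, and that the server URL is a placeholder.

import { LocalNode } from "cojson";
import { createWebSocketPeer } from "cojson-transport-ws";

// Hypothetical helper, only for illustration: connect an existing LocalNode to a
// sync server over a browser WebSocket (the URL is a placeholder).
function connectToSyncServer(node: LocalNode, url: string) {
    const websocket = new WebSocket(url);
    node.syncManager.addPeer(
        createWebSocketPeer({
            id: "sync-server",
            websocket,
            role: "server",
            expectPings: true,
        }),
    );
}

// Ping round trips reported by the server accumulate on globalThis.jazzPings,
// so a rough latency reading looks like this:
const pings = (globalThis as { jazzPings?: { received: number; sent: number; dc: string }[] }).jazzPings;
const lastPing = pings?.[pings.length - 1];
if (lastPing) {
    console.log("ping latency (ms):", lastPing.received - lastPing.sent);
}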
@@ -1,5 +1,53 @@
# cojson

## 0.7.28

### Patch Changes

- Fix ignoring server peers

## 0.7.26

### Patch Changes

- Remove Effect from jazz/cojson internals

## 0.7.23

### Patch Changes

- Mostly complete OPFS implementation (single-tab only)

## 0.7.18

### Patch Changes

- Update to Effect 3.5.2

## 0.7.17

### Patch Changes

- Fix bugs in new storage interface

## 0.7.14

### Patch Changes

- Use Effect Queues and Streams instead of custom queue implementation

## 0.7.11

### Patch Changes

- Fix webpack import of node:crypto module

## 0.7.10

### Patch Changes

- Also cache agent ID in RawControlledAccount

## 0.7.9

### Patch Changes
@@ -5,7 +5,7 @@
    "types": "src/index.ts",
    "type": "module",
    "license": "MIT",
    "version": "0.7.9",
    "version": "0.7.28",
    "devDependencies": {
        "@types/jest": "^29.5.3",
        "@typescript-eslint/eslint-plugin": "^6.2.1",
@@ -18,13 +18,12 @@
    },
    "dependencies": {
        "@hazae41/berith": "^1.2.6",
        "@noble/curves": "^1.3.0",
        "@noble/ciphers": "^0.1.3",
        "@noble/curves": "^1.3.0",
        "@noble/hashes": "^1.4.0",
        "@scure/base": "^1.1.1",
        "effect": "^3.1.5",
        "hash-wasm": "^4.9.0",
        "isomorphic-streams": "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
        "queueable": "^5.3.2"
    },
    "scripts": {
        "dev": "tsc --watch --sourceMap --outDir dist",
@@ -26,6 +26,13 @@ import { expectGroup } from "./typeUtils/expectGroup.js";
import { isAccountID } from "./typeUtils/isAccountID.js";
import { accountOrAgentIDfromSessionID } from "./typeUtils/accountOrAgentIDfromSessionID.js";

/**
In order not to block other concurrently syncing CoValues, we introduce a maximum transaction size,
since transactions are the smallest unit of progress that can be synced within a CoValue.
This is particularly important for storing binary data in CoValues, since it is likely to be at least on the order of megabytes.
It also means that we want to keep a signature roughly after each MAX_RECOMMENDED_TX_SIZE chunk,
to be able to verify partially loaded CoValues, or CoValues that are still being created (like a video live stream).
**/
export const MAX_RECOMMENDED_TX_SIZE = 100 * 1024;

export type CoValueHeader = {
@@ -383,7 +390,7 @@ export class CoValueCore {
                0,
            );

            if (sizeOfTxsSinceLastInbetweenSignature > 100 * 1024) {
            if (sizeOfTxsSinceLastInbetweenSignature > MAX_RECOMMENDED_TX_SIZE) {
                // console.log(
                //     "Saving inbetween signature for tx ",
                //     sessionID,
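To make the size rule concrete, here is a hypothetical helper (not cojson API) that splits a large binary payload into pieces no bigger than MAX_RECOMMENDED_TX_SIZE, which is also roughly how often an in-between signature gets saved.

// Hypothetical helper, for illustration only.
const MAX_RECOMMENDED_TX_SIZE = 100 * 1024;

function splitIntoTxSizedPieces(data: Uint8Array): Uint8Array[] {
    const pieces: Uint8Array[] = [];
    for (let offset = 0; offset < data.length; offset += MAX_RECOMMENDED_TX_SIZE) {
        pieces.push(data.slice(offset, offset + MAX_RECOMMENDED_TX_SIZE));
    }
    return pieces;
}

// A 1 MiB blob becomes 11 transactions: ten full 100 KiB pieces plus one remainder.
console.log(splitIntoTxSizedPieces(new Uint8Array(1024 * 1024)).length); // 11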
@@ -97,7 +97,12 @@ export class RawControlledAccount<Meta extends AccountMeta = AccountMeta>
    }

    currentAgentID(): AgentID {
        return this.crypto.getAgentID(this.agentSecret);
        if (this._cachedCurrentAgentID) {
            return this._cachedCurrentAgentID;
        }
        const agentID = this.crypto.getAgentID(this.agentSecret);
        this._cachedCurrentAgentID = agentID;
        return agentID;
    }

    currentSignerID(): SignerID {
@@ -44,11 +44,13 @@ export class WasmCrypto extends CryptoProvider<Uint8Array> {
|
||||
if ("crypto" in globalThis) {
|
||||
resolve();
|
||||
} else {
|
||||
return import("node:crypto").then(({ webcrypto }) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(globalThis as any).crypto = webcrypto;
|
||||
resolve();
|
||||
});
|
||||
return import(/*webpackIgnore: true*/ "node:crypto").then(
|
||||
({ webcrypto }) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(globalThis as any).crypto = webcrypto;
|
||||
resolve();
|
||||
},
|
||||
);
|
||||
}
|
||||
}),
|
||||
]).then(([blake3instance]) => new WasmCrypto(blake3instance));
|
||||
|
||||
@@ -18,7 +18,7 @@ import {
|
||||
} from "./crypto/crypto.js";
|
||||
import { WasmCrypto } from "./crypto/WasmCrypto.js";
|
||||
import { PureJSCrypto } from "./crypto/PureJSCrypto.js";
|
||||
import { connectedPeers } from "./streamUtils.js";
|
||||
import { connectedPeers, Channel } from "./streamUtils.js";
|
||||
import { ControlledAgent, RawControlledAccount } from "./coValues/account.js";
|
||||
import type { Role } from "./permissions.js";
|
||||
import { rawCoIDtoBytes, rawCoIDfromBytes, isRawCoID } from "./ids.js";
|
||||
@@ -41,7 +41,13 @@ import type {
|
||||
BinaryCoStreamMeta,
|
||||
} from "./coValues/coStream.js";
|
||||
import type { JsonValue } from "./jsonValue.js";
|
||||
import type { SyncMessage, Peer } from "./sync.js";
|
||||
import type {
|
||||
SyncMessage,
|
||||
Peer,
|
||||
IncomingSyncStream,
|
||||
OutgoingSyncQueue,
|
||||
} from "./sync.js";
|
||||
import { DisconnectedError, PingTimeoutError } from "./sync.js";
|
||||
import type { AgentSecret } from "./crypto/crypto.js";
|
||||
import type {
|
||||
AccountID,
|
||||
@@ -53,12 +59,7 @@ import type * as Media from "./media.js";
|
||||
|
||||
type Value = JsonValue | AnyRawCoValue;
|
||||
|
||||
import {
|
||||
LSMStorage,
|
||||
FSErr,
|
||||
BlockFilename,
|
||||
WalFilename,
|
||||
} from "./storage/index.js";
|
||||
import { LSMStorage, BlockFilename, WalFilename } from "./storage/index.js";
|
||||
import { FileSystem } from "./storage/FileSystem.js";
|
||||
|
||||
/** @hidden */
|
||||
@@ -78,6 +79,7 @@ export const cojsonInternals = {
|
||||
accountHeaderForInitialAgentSecret,
|
||||
idforHeader,
|
||||
StreamingHash,
|
||||
Channel,
|
||||
};
|
||||
|
||||
export {
|
||||
@@ -119,7 +121,16 @@ export {
|
||||
LSMStorage,
|
||||
};
|
||||
|
||||
export type { Value, FileSystem, FSErr, BlockFilename, WalFilename };
|
||||
export type {
|
||||
Value,
|
||||
FileSystem,
|
||||
BlockFilename,
|
||||
WalFilename,
|
||||
IncomingSyncStream,
|
||||
OutgoingSyncQueue,
|
||||
DisconnectedError,
|
||||
PingTimeoutError,
|
||||
};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
export namespace CojsonInternalTypes {
|
||||
|
||||
@@ -670,6 +670,10 @@ export class LocalNode {
|
||||
|
||||
return newNode;
|
||||
}
|
||||
|
||||
gracefulShutdown() {
|
||||
this.syncManager.gracefulShutdown();
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
|
||||
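A small usage sketch for the new shutdown hook (the beforeunload wiring and the `node` variable are assumptions for illustration, not something the diff prescribes): calling gracefulShutdown() on a LocalNode closes the outgoing queue of every connected peer via the sync manager.

window.addEventListener("beforeunload", () => {
    node.gracefulShutdown(); // closes each peer's outgoing queue
});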
@@ -1,9 +1,8 @@
|
||||
import { Effect } from "effect";
|
||||
import { CoValueChunk } from "./index.js";
|
||||
import { RawCoID } from "../ids.js";
|
||||
import { CryptoProvider, StreamingHash } from "../crypto/crypto.js";
|
||||
|
||||
export type BlockFilename = `${string}-L${number}-H${number}.jsonl`;
|
||||
export type BlockFilename = `L${number}-${string}-${string}-H${number}.jsonl`;
|
||||
|
||||
export type BlockHeader = { id: RawCoID; start: number; length: number }[];
|
||||
|
||||
@@ -11,142 +10,124 @@ export type WalEntry = { id: RawCoID } & CoValueChunk;
|
||||
|
||||
export type WalFilename = `wal-${number}.jsonl`;
|
||||
|
||||
export type FSErr = {
|
||||
type: "fileSystemError";
|
||||
error: Error;
|
||||
};
|
||||
|
||||
export interface FileSystem<WriteHandle, ReadHandle> {
|
||||
crypto: CryptoProvider;
|
||||
createFile(filename: string): Effect.Effect<WriteHandle, FSErr>;
|
||||
append(handle: WriteHandle, data: Uint8Array): Effect.Effect<void, FSErr>;
|
||||
close(handle: ReadHandle | WriteHandle): Effect.Effect<void, FSErr>;
|
||||
closeAndRename(
|
||||
handle: WriteHandle,
|
||||
filename: BlockFilename,
|
||||
): Effect.Effect<void, FSErr>;
|
||||
openToRead(
|
||||
filename: string,
|
||||
): Effect.Effect<{ handle: ReadHandle; size: number }, FSErr>;
|
||||
createFile(filename: string): Promise<WriteHandle>;
|
||||
append(handle: WriteHandle, data: Uint8Array): Promise<void>;
|
||||
close(handle: ReadHandle | WriteHandle): Promise<void>;
|
||||
closeAndRename(handle: WriteHandle, filename: BlockFilename): Promise<void>;
|
||||
openToRead(filename: string): Promise<{ handle: ReadHandle; size: number }>;
|
||||
read(
|
||||
handle: ReadHandle,
|
||||
offset: number,
|
||||
length: number,
|
||||
): Effect.Effect<Uint8Array, FSErr>;
|
||||
listFiles(): Effect.Effect<string[], FSErr>;
|
||||
removeFile(
|
||||
filename: BlockFilename | WalFilename,
|
||||
): Effect.Effect<void, FSErr>;
|
||||
): Promise<Uint8Array>;
|
||||
listFiles(): Promise<string[]>;
|
||||
removeFile(filename: BlockFilename | WalFilename): Promise<void>;
|
||||
}
|
||||
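As a sketch of what the new Promise-based interface asks of an implementation, here is a minimal in-memory FileSystem. It is purely illustrative and not part of the package; it assumes FileSystem, BlockFilename and WalFilename are imported from this module and CryptoProvider from the crypto module.

type MemHandle = { name: string };

class InMemoryFS implements FileSystem<MemHandle, MemHandle> {
    private files = new Map<string, Uint8Array>();

    constructor(public crypto: CryptoProvider) {}

    async createFile(filename: string): Promise<MemHandle> {
        this.files.set(filename, new Uint8Array(0));
        return { name: filename };
    }

    async append(handle: MemHandle, data: Uint8Array): Promise<void> {
        const existing = this.files.get(handle.name) ?? new Uint8Array(0);
        const grown = new Uint8Array(existing.length + data.length);
        grown.set(existing);
        grown.set(data, existing.length);
        this.files.set(handle.name, grown);
    }

    async close(_handle: MemHandle): Promise<void> {}

    async closeAndRename(handle: MemHandle, filename: BlockFilename): Promise<void> {
        const data = this.files.get(handle.name);
        if (data !== undefined) {
            this.files.delete(handle.name);
            this.files.set(filename, data);
        }
    }

    async openToRead(filename: string): Promise<{ handle: MemHandle; size: number }> {
        return {
            handle: { name: filename },
            size: this.files.get(filename)?.length ?? 0,
        };
    }

    async read(handle: MemHandle, offset: number, length: number): Promise<Uint8Array> {
        const data = this.files.get(handle.name) ?? new Uint8Array(0);
        return data.slice(offset, offset + length);
    }

    async listFiles(): Promise<string[]> {
        return [...this.files.keys()];
    }

    async removeFile(filename: BlockFilename | WalFilename): Promise<void> {
        this.files.delete(filename);
    }
}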
|
||||
export const textEncoder = new TextEncoder();
|
||||
export const textDecoder = new TextDecoder();
|
||||
|
||||
export function readChunk<RH, FS extends FileSystem<unknown, RH>>(
|
||||
export async function readChunk<RH, FS extends FileSystem<unknown, RH>>(
|
||||
handle: RH,
|
||||
header: { start: number; length: number },
|
||||
fs: FS,
|
||||
): Effect.Effect<CoValueChunk, FSErr> {
|
||||
return Effect.gen(function* ($) {
|
||||
const chunkBytes = yield* $(
|
||||
fs.read(handle, header.start, header.length),
|
||||
);
|
||||
): Promise<CoValueChunk> {
|
||||
const chunkBytes = await fs.read(handle, header.start, header.length);
|
||||
|
||||
const chunk = JSON.parse(textDecoder.decode(chunkBytes));
|
||||
return chunk;
|
||||
});
|
||||
const chunk = JSON.parse(textDecoder.decode(chunkBytes));
|
||||
return chunk;
|
||||
}
|
||||
|
||||
export function readHeader<RH, FS extends FileSystem<unknown, RH>>(
|
||||
export async function readHeader<RH, FS extends FileSystem<unknown, RH>>(
|
||||
filename: string,
|
||||
handle: RH,
|
||||
size: number,
|
||||
fs: FS,
|
||||
): Effect.Effect<BlockHeader, FSErr> {
|
||||
return Effect.gen(function* ($) {
|
||||
const headerLength = Number(filename.match(/-H(\d+)\.jsonl$/)![1]!);
|
||||
): Promise<BlockHeader> {
|
||||
const headerLength = Number(filename.match(/-H(\d+)\.jsonl$/)![1]!);
|
||||
|
||||
const headerBytes = yield* $(
|
||||
fs.read(handle, size - headerLength, headerLength),
|
||||
);
|
||||
const headerBytes = await fs.read(
|
||||
handle,
|
||||
size - headerLength,
|
||||
headerLength,
|
||||
);
|
||||
|
||||
const header = JSON.parse(textDecoder.decode(headerBytes));
|
||||
return header;
|
||||
});
|
||||
const header = JSON.parse(textDecoder.decode(headerBytes));
|
||||
return header;
|
||||
}
|
||||
|
||||
export function writeBlock<WH, RH, FS extends FileSystem<WH, RH>>(
|
||||
export async function writeBlock<WH, RH, FS extends FileSystem<WH, RH>>(
|
||||
chunks: Map<RawCoID, CoValueChunk>,
|
||||
level: number,
|
||||
blockNumber: number,
|
||||
fs: FS,
|
||||
): Effect.Effect<void, FSErr> {
|
||||
): Promise<BlockFilename> {
|
||||
if (chunks.size === 0) {
|
||||
return Effect.die(new Error("No chunks to write"));
|
||||
throw new Error("No chunks to write");
|
||||
}
|
||||
|
||||
return Effect.gen(function* ($) {
|
||||
const blockHeader: BlockHeader = [];
|
||||
const blockHeader: BlockHeader = [];
|
||||
|
||||
let offset = 0;
|
||||
let offset = 0;
|
||||
|
||||
const file = yield* $(
|
||||
fs.createFile(
|
||||
"wipBlock" +
|
||||
Math.random().toString(36).substring(7) +
|
||||
".tmp.jsonl",
|
||||
),
|
||||
);
|
||||
const hash = new StreamingHash(fs.crypto);
|
||||
const file = await fs.createFile(
|
||||
"wipBlock" + Math.random().toString(36).substring(7) + ".tmp.jsonl",
|
||||
);
|
||||
const hash = new StreamingHash(fs.crypto);
|
||||
|
||||
const chunksSortedById = Array.from(chunks).sort(([id1], [id2]) =>
|
||||
id1.localeCompare(id2),
|
||||
);
|
||||
const chunksSortedById = Array.from(chunks).sort(([id1], [id2]) =>
|
||||
id1.localeCompare(id2),
|
||||
);
|
||||
|
||||
for (const [id, chunk] of chunksSortedById) {
|
||||
const encodedBytes = hash.update(chunk);
|
||||
const encodedBytesWithNewline = new Uint8Array(
|
||||
encodedBytes.length + 1,
|
||||
);
|
||||
encodedBytesWithNewline.set(encodedBytes);
|
||||
encodedBytesWithNewline[encodedBytes.length] = 10;
|
||||
yield* $(fs.append(file, encodedBytesWithNewline));
|
||||
const length = encodedBytesWithNewline.length;
|
||||
blockHeader.push({ id, start: offset, length });
|
||||
offset += length;
|
||||
}
|
||||
for (const [id, chunk] of chunksSortedById) {
|
||||
const encodedBytes = hash.update(chunk);
|
||||
const encodedBytesWithNewline = new Uint8Array(encodedBytes.length + 1);
|
||||
encodedBytesWithNewline.set(encodedBytes);
|
||||
encodedBytesWithNewline[encodedBytes.length] = 10;
|
||||
await fs.append(file, encodedBytesWithNewline);
|
||||
const length = encodedBytesWithNewline.length;
|
||||
blockHeader.push({ id, start: offset, length });
|
||||
offset += length;
|
||||
}
|
||||
|
||||
const headerBytes = textEncoder.encode(JSON.stringify(blockHeader));
|
||||
yield* $(fs.append(file, headerBytes));
|
||||
const headerBytes = textEncoder.encode(JSON.stringify(blockHeader));
|
||||
await fs.append(file, headerBytes);
|
||||
|
||||
console.log(
|
||||
"full file",
|
||||
yield* $(
|
||||
fs.read(file as unknown as RH, 0, offset + headerBytes.length),
|
||||
),
|
||||
);
|
||||
// console.log(
|
||||
// "full file",
|
||||
// yield* $(
|
||||
// fs.read(file as unknown as RH, 0, offset + headerBytes.length),
|
||||
// ),
|
||||
// );
|
||||
|
||||
const filename: BlockFilename = `${hash.digest()}-L${level}-H${
|
||||
headerBytes.length
|
||||
}.jsonl`;
|
||||
console.log("renaming to" + filename);
|
||||
yield* $(fs.closeAndRename(file, filename));
|
||||
const filename: BlockFilename = `L${level}-${(blockNumber + "").padStart(
|
||||
3,
|
||||
"0",
|
||||
)}-${hash.digest().replace("hash_", "").slice(0, 15)}-H${
|
||||
headerBytes.length
|
||||
}.jsonl`;
|
||||
// console.log("renaming to" + filename);
|
||||
await fs.closeAndRename(file, filename);
|
||||
|
||||
console.log("Wrote block", filename, blockHeader);
|
||||
});
|
||||
return filename;
|
||||
|
||||
// console.log("Wrote block", filename, blockHeader);
|
||||
// console.log("IDs in block", blockHeader.map(e => e.id));
|
||||
}
|
||||
|
||||
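For reference, a hedged illustration of the new block naming scheme produced above (all concrete values are made up): level, zero-padded block number, a 15-character prefix of the block hash, and the header length that readHeader() later recovers from the "-H<n>.jsonl" suffix.

// Made-up values, shaped to match the BlockFilename template type:
const exampleBlock: BlockFilename = `L${2}-${"007"}-${"3kSrnDqvLH4Qm1x"}-H${132}.jsonl`;
// "L2-007-3kSrnDqvLH4Qm1x-H132.jsonl"

// The zero-padded block number keeps plain lexicographic sorting in block order,
// and the trailing header length is parsed the same way readHeader() does it:
const headerLength = Number(exampleBlock.match(/-H(\d+)\.jsonl$/)![1]); // 132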
export function writeToWal<WH, RH, FS extends FileSystem<WH, RH>>(
|
||||
export async function writeToWal<WH, RH, FS extends FileSystem<WH, RH>>(
|
||||
handle: WH,
|
||||
fs: FS,
|
||||
id: RawCoID,
|
||||
chunk: CoValueChunk,
|
||||
): Effect.Effect<void, FSErr> {
|
||||
return Effect.gen(function* ($) {
|
||||
const walEntry: WalEntry = {
|
||||
id,
|
||||
...chunk,
|
||||
};
|
||||
const bytes = textEncoder.encode(JSON.stringify(walEntry) + "\n");
|
||||
yield* $(fs.append(handle, bytes));
|
||||
});
|
||||
) {
|
||||
const walEntry: WalEntry = {
|
||||
id,
|
||||
...chunk,
|
||||
};
|
||||
const bytes = textEncoder.encode(JSON.stringify(walEntry) + "\n");
|
||||
console.log("writing to WAL", handle, id, bytes.length);
|
||||
return fs.append(handle, bytes);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { Either } from "effect";
|
||||
import { RawCoID, SessionID } from "../ids.js";
|
||||
import { MAX_RECOMMENDED_TX_SIZE } from "../index.js";
|
||||
import { CoValueKnownState, NewContentMessage } from "../sync.js";
|
||||
@@ -80,7 +79,7 @@ export function chunkToKnownState(id: RawCoID, chunk: CoValueChunk) {
|
||||
export function mergeChunks(
|
||||
chunkA: CoValueChunk,
|
||||
chunkB: CoValueChunk,
|
||||
): Either.Either<"nonContigous", CoValueChunk> {
|
||||
): "nonContigous" | CoValueChunk {
|
||||
const header = chunkA.header || chunkB.header;
|
||||
|
||||
const newSessions = { ...chunkA.sessionEntries };
|
||||
@@ -126,14 +125,16 @@ export function mergeChunks(
|
||||
} else {
|
||||
const lastNewEntry = newEntries[newEntries.length - 1]!;
|
||||
lastNewEntry.transactions.push(...entry.transactions);
|
||||
lastNewEntry.lastSignature = entry.lastSignature;
|
||||
|
||||
bytesSinceLastSignature += entry.transactions.length;
|
||||
}
|
||||
}
|
||||
newSessions[sessionID] = newEntries;
|
||||
} else {
|
||||
return Either.right("nonContigous" as const);
|
||||
return "nonContigous" as const;
|
||||
}
|
||||
}
|
||||
|
||||
return Either.left({ header, sessionEntries: newSessions });
|
||||
return { header, sessionEntries: newSessions };
|
||||
}
|
||||
|
||||
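With the Either wrapper gone, callers now branch on the returned value directly, as the compaction code later in this diff does. A minimal sketch, where chunkA, chunkB, id and the coValues map are assumed to already exist:

const merged = mergeChunks(chunkA, chunkB);
if (merged === "nonContigous") {
    console.warn("Chunks don't line up contiguously, keeping them separate");
} else {
    coValues.set(id, merged); // merged is a plain CoValueChunk
}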
@@ -1,18 +1,12 @@
|
||||
import {
|
||||
ReadableStream,
|
||||
WritableStream,
|
||||
ReadableStreamDefaultReader,
|
||||
WritableStreamDefaultWriter,
|
||||
} from "isomorphic-streams";
|
||||
import { Effect, Either, SynchronizedRef } from "effect";
|
||||
import { RawCoID } from "../ids.js";
|
||||
import { CoValueHeader, Transaction } from "../coValueCore.js";
|
||||
import { Signature } from "../crypto/crypto.js";
|
||||
import {
|
||||
CoValueKnownState,
|
||||
IncomingSyncStream,
|
||||
NewContentMessage,
|
||||
OutgoingSyncQueue,
|
||||
Peer,
|
||||
SyncMessage,
|
||||
} from "../sync.js";
|
||||
import { CoID, RawCoValue } from "../index.js";
|
||||
import { connectedPeers } from "../streamUtils.js";
|
||||
@@ -23,7 +17,6 @@ import {
|
||||
} from "./chunksAndKnownStates.js";
|
||||
import {
|
||||
BlockFilename,
|
||||
FSErr,
|
||||
FileSystem,
|
||||
WalEntry,
|
||||
WalFilename,
|
||||
@@ -33,7 +26,9 @@ import {
|
||||
writeBlock,
|
||||
writeToWal,
|
||||
} from "./FileSystem.js";
|
||||
export type { FSErr, BlockFilename, WalFilename } from "./FileSystem.js";
|
||||
export type { BlockFilename, WalFilename } from "./FileSystem.js";
|
||||
|
||||
const MAX_N_LEVELS = 3;
|
||||
|
||||
export type CoValueChunk = {
|
||||
header?: CoValueHeader;
|
||||
@@ -47,409 +42,508 @@ export type CoValueChunk = {
|
||||
};
|
||||
|
||||
export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
|
||||
fromLocalNode!: ReadableStreamDefaultReader<SyncMessage>;
|
||||
toLocalNode: WritableStreamDefaultWriter<SyncMessage>;
|
||||
fs: FS;
|
||||
currentWal: SynchronizedRef.SynchronizedRef<WH | undefined>;
|
||||
coValues: SynchronizedRef.SynchronizedRef<{
|
||||
currentWal: WH | undefined;
|
||||
coValues: {
|
||||
[id: RawCoID]: CoValueChunk | undefined;
|
||||
}>;
|
||||
};
|
||||
fileCache: string[] | undefined;
|
||||
headerCache = new Map<
|
||||
BlockFilename,
|
||||
{ [id: RawCoID]: { start: number; length: number } }
|
||||
>();
|
||||
blockFileHandles = new Map<
|
||||
BlockFilename,
|
||||
Promise<{ handle: RH; size: number }>
|
||||
>();
|
||||
|
||||
constructor(
|
||||
fs: FS,
|
||||
fromLocalNode: ReadableStream<SyncMessage>,
|
||||
toLocalNode: WritableStream<SyncMessage>,
|
||||
public fs: FS,
|
||||
public fromLocalNode: IncomingSyncStream,
|
||||
public toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
this.fs = fs;
|
||||
this.fromLocalNode = fromLocalNode.getReader();
|
||||
this.toLocalNode = toLocalNode.getWriter();
|
||||
this.coValues = SynchronizedRef.unsafeMake({});
|
||||
this.currentWal = SynchronizedRef.unsafeMake<WH | undefined>(undefined);
|
||||
this.coValues = {};
|
||||
this.currentWal = undefined;
|
||||
|
||||
void Effect.runPromise(
|
||||
Effect.gen(this, function* () {
|
||||
let done = false;
|
||||
while (!done) {
|
||||
const result = yield* Effect.promise(() =>
|
||||
this.fromLocalNode.read(),
|
||||
);
|
||||
done = result.done;
|
||||
let nMsg = 0;
|
||||
|
||||
if (result.value) {
|
||||
if (result.value.action === "done") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (result.value.action === "content") {
|
||||
yield* this.handleNewContent(result.value);
|
||||
} else {
|
||||
yield* this.sendNewContent(
|
||||
result.value.id,
|
||||
result.value,
|
||||
undefined,
|
||||
);
|
||||
}
|
||||
const processMessages = async () => {
|
||||
for await (const msg of fromLocalNode) {
|
||||
console.log("Storage msg start", nMsg);
|
||||
try {
|
||||
if (msg === "Disconnected" || msg === "PingTimeout") {
|
||||
throw new Error("Unexpected Disconnected message");
|
||||
}
|
||||
if (msg.action === "done") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (msg.action === "content") {
|
||||
await this.handleNewContent(msg);
|
||||
} else {
|
||||
await this.sendNewContent(msg.id, msg, undefined);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(
|
||||
new Error(
|
||||
`Error reading from localNode, handling msg\n\n${JSON.stringify(
|
||||
msg,
|
||||
(k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
)}`,
|
||||
{ cause: e },
|
||||
),
|
||||
);
|
||||
}
|
||||
console.log("Storage msg end", nMsg);
|
||||
nMsg++;
|
||||
}
|
||||
};
|
||||
|
||||
return;
|
||||
}),
|
||||
processMessages().catch((e) =>
|
||||
console.error("Error in processMessages in storage", e),
|
||||
);
|
||||
|
||||
setTimeout(() => this.compact(), 20000);
|
||||
}
|
||||
|
||||
sendNewContent(
|
||||
id: RawCoID,
|
||||
known: CoValueKnownState | undefined,
|
||||
asDependencyOf: RawCoID | undefined,
|
||||
): Effect.Effect<void, FSErr> {
|
||||
return SynchronizedRef.updateEffect(this.coValues, (coValues) =>
|
||||
this.sendNewContentInner(coValues, id, known, asDependencyOf),
|
||||
setTimeout(
|
||||
() =>
|
||||
this.compact().catch((e) => {
|
||||
console.error("Error while compacting", e);
|
||||
}),
|
||||
20000,
|
||||
);
|
||||
}
|
||||
|
||||
private sendNewContentInner(
|
||||
coValues: { [id: `co_z${string}`]: CoValueChunk | undefined },
|
||||
async sendNewContent(
|
||||
id: RawCoID,
|
||||
known: CoValueKnownState | undefined,
|
||||
asDependencyOf: RawCoID | undefined,
|
||||
): Effect.Effect<
|
||||
{ [id: `co_z${string}`]: CoValueChunk | undefined },
|
||||
FSErr,
|
||||
never
|
||||
> {
|
||||
return Effect.gen(this, function* () {
|
||||
let coValue = coValues[id];
|
||||
) {
|
||||
let coValue = this.coValues[id];
|
||||
|
||||
if (!coValue) {
|
||||
coValue = yield* this.loadCoValue(id, this.fs);
|
||||
}
|
||||
if (!coValue) {
|
||||
coValue = await this.loadCoValue(id, this.fs);
|
||||
}
|
||||
|
||||
if (!coValue) {
|
||||
yield* Effect.promise(() =>
|
||||
this.toLocalNode.write({
|
||||
id: id,
|
||||
action: "known",
|
||||
header: false,
|
||||
sessions: {},
|
||||
asDependencyOf,
|
||||
}),
|
||||
);
|
||||
if (!coValue) {
|
||||
this.toLocalNode
|
||||
.push({
|
||||
id: id,
|
||||
action: "known",
|
||||
header: false,
|
||||
sessions: {},
|
||||
asDependencyOf,
|
||||
})
|
||||
.catch((e) => console.error("Error while pushing known", e));
|
||||
|
||||
return coValues;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
!known?.header &&
|
||||
coValue.header?.ruleset.type === "ownedByGroup"
|
||||
) {
|
||||
coValues = yield* this.sendNewContentInner(
|
||||
coValues,
|
||||
coValue.header.ruleset.group,
|
||||
undefined,
|
||||
asDependencyOf || id,
|
||||
);
|
||||
} else if (
|
||||
!known?.header &&
|
||||
coValue.header?.ruleset.type === "group"
|
||||
) {
|
||||
const dependedOnAccounts = new Set();
|
||||
for (const session of Object.values(coValue.sessionEntries)) {
|
||||
for (const entry of session) {
|
||||
for (const tx of entry.transactions) {
|
||||
if (tx.privacy === "trusting") {
|
||||
const parsedChanges = JSON.parse(tx.changes);
|
||||
for (const change of parsedChanges) {
|
||||
if (
|
||||
change.op === "set" &&
|
||||
change.key.startsWith("co_")
|
||||
) {
|
||||
dependedOnAccounts.add(change.key);
|
||||
}
|
||||
if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
|
||||
await this.sendNewContent(
|
||||
coValue.header.ruleset.group,
|
||||
undefined,
|
||||
asDependencyOf || id,
|
||||
);
|
||||
} else if (!known?.header && coValue.header?.ruleset.type === "group") {
|
||||
const dependedOnAccounts = new Set();
|
||||
for (const session of Object.values(coValue.sessionEntries)) {
|
||||
for (const entry of session) {
|
||||
for (const tx of entry.transactions) {
|
||||
if (tx.privacy === "trusting") {
|
||||
const parsedChanges = JSON.parse(tx.changes);
|
||||
for (const change of parsedChanges) {
|
||||
if (
|
||||
change.op === "set" &&
|
||||
change.key.startsWith("co_")
|
||||
) {
|
||||
dependedOnAccounts.add(change.key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const account of dependedOnAccounts) {
|
||||
coValues = yield* this.sendNewContentInner(
|
||||
coValues,
|
||||
account as CoID<RawCoValue>,
|
||||
undefined,
|
||||
asDependencyOf || id,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const newContentMessages = contentSinceChunk(
|
||||
id,
|
||||
coValue,
|
||||
known,
|
||||
).map((message) => ({ ...message, asDependencyOf }));
|
||||
|
||||
const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);
|
||||
|
||||
yield* Effect.promise(() =>
|
||||
this.toLocalNode.write({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
asDependencyOf,
|
||||
}),
|
||||
);
|
||||
|
||||
for (const message of newContentMessages) {
|
||||
if (Object.keys(message.new).length === 0) continue;
|
||||
yield* Effect.promise(() => this.toLocalNode.write(message));
|
||||
for (const account of dependedOnAccounts) {
|
||||
await this.sendNewContent(
|
||||
account as CoID<RawCoValue>,
|
||||
undefined,
|
||||
asDependencyOf || id,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return { ...coValues, [id]: coValue };
|
||||
});
|
||||
}
|
||||
|
||||
withWAL(
|
||||
handler: (wal: WH) => Effect.Effect<void, FSErr>,
|
||||
): Effect.Effect<void, FSErr> {
|
||||
return SynchronizedRef.updateEffect(this.currentWal, (wal) =>
|
||||
Effect.gen(this, function* () {
|
||||
let newWal = wal;
|
||||
if (!newWal) {
|
||||
newWal = yield* this.fs.createFile(
|
||||
`wal-${new Date().toISOString()}-${Math.random()
|
||||
.toString(36)
|
||||
.slice(2)}.jsonl`,
|
||||
);
|
||||
}
|
||||
yield* handler(newWal);
|
||||
return newWal;
|
||||
}),
|
||||
const newContentMessages = contentSinceChunk(id, coValue, known).map(
|
||||
(message) => ({ ...message, asDependencyOf }),
|
||||
);
|
||||
|
||||
const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);
|
||||
|
||||
this.toLocalNode
|
||||
.push({
|
||||
action: "known",
|
||||
...ourKnown,
|
||||
asDependencyOf,
|
||||
})
|
||||
.catch((e) => console.error("Error while pushing known", e));
|
||||
|
||||
for (const message of newContentMessages) {
|
||||
if (Object.keys(message.new).length === 0) continue;
|
||||
this.toLocalNode
|
||||
.push(message)
|
||||
.catch((e) =>
|
||||
console.error("Error while pushing new content", e),
|
||||
);
|
||||
}
|
||||
|
||||
this.coValues[id] = coValue;
|
||||
}
|
||||
|
||||
handleNewContent(
|
||||
newContent: NewContentMessage,
|
||||
): Effect.Effect<void, FSErr> {
|
||||
return SynchronizedRef.updateEffect(this.coValues, (coValues) =>
|
||||
Effect.gen(this, function* () {
|
||||
const coValue = coValues[newContent.id];
|
||||
async withWAL(handler: (wal: WH) => Promise<void>) {
|
||||
if (!this.currentWal) {
|
||||
this.currentWal = await this.fs.createFile(
|
||||
`wal-${Date.now()}-${Math.random()
|
||||
.toString(36)
|
||||
.slice(2)}.jsonl`,
|
||||
);
|
||||
}
|
||||
await handler(this.currentWal);
|
||||
}
|
||||
|
||||
const newContentAsChunk: CoValueChunk = {
|
||||
header: newContent.header,
|
||||
sessionEntries: Object.fromEntries(
|
||||
Object.entries(newContent.new).map(
|
||||
([sessionID, newInSession]) => [
|
||||
sessionID,
|
||||
[
|
||||
{
|
||||
after: newInSession.after,
|
||||
lastSignature:
|
||||
newInSession.lastSignature,
|
||||
transactions:
|
||||
newInSession.newTransactions,
|
||||
},
|
||||
],
|
||||
],
|
||||
),
|
||||
async handleNewContent(newContent: NewContentMessage) {
|
||||
const coValue = this.coValues[newContent.id];
|
||||
|
||||
const newContentAsChunk: CoValueChunk = {
|
||||
header: newContent.header,
|
||||
sessionEntries: Object.fromEntries(
|
||||
Object.entries(newContent.new).map(
|
||||
([sessionID, newInSession]) => [
|
||||
sessionID,
|
||||
[
|
||||
{
|
||||
after: newInSession.after,
|
||||
lastSignature: newInSession.lastSignature,
|
||||
transactions: newInSession.newTransactions,
|
||||
},
|
||||
],
|
||||
],
|
||||
),
|
||||
),
|
||||
};
|
||||
|
||||
if (!coValue) {
|
||||
if (newContent.header) {
|
||||
// console.log("Creating in WAL", newContent.id);
|
||||
await this.withWAL((wal) =>
|
||||
writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
|
||||
);
|
||||
|
||||
this.coValues[newContent.id] = newContentAsChunk;
|
||||
} else {
|
||||
console.warn(
|
||||
"Incontiguous incoming update for " + newContent.id,
|
||||
);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
const merged = mergeChunks(coValue, newContentAsChunk);
|
||||
if (merged === "nonContigous") {
|
||||
console.warn(
|
||||
"Non-contigous new content for " + newContent.id,
|
||||
Object.entries(coValue.sessionEntries).map(
|
||||
([session, entries]) =>
|
||||
entries.map((entry) => ({
|
||||
session: session,
|
||||
after: entry.after,
|
||||
length: entry.transactions.length,
|
||||
})),
|
||||
),
|
||||
};
|
||||
Object.entries(newContentAsChunk.sessionEntries).map(
|
||||
([session, entries]) =>
|
||||
entries.map((entry) => ({
|
||||
session: session,
|
||||
after: entry.after,
|
||||
length: entry.transactions.length,
|
||||
})),
|
||||
),
|
||||
);
|
||||
} else {
|
||||
// console.log("Appending to WAL", newContent.id);
|
||||
await this.withWAL((wal) =>
|
||||
writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
|
||||
);
|
||||
|
||||
if (!coValue) {
|
||||
if (newContent.header) {
|
||||
console.log("Creating in WAL", newContent.id);
|
||||
yield* this.withWAL((wal) =>
|
||||
writeToWal(
|
||||
wal,
|
||||
this.fs,
|
||||
newContent.id,
|
||||
newContentAsChunk,
|
||||
),
|
||||
);
|
||||
this.coValues[newContent.id] = merged;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...coValues,
|
||||
[newContent.id]: newContentAsChunk,
|
||||
};
|
||||
} else {
|
||||
// yield*
|
||||
// Effect.promise(() =>
|
||||
// this.toLocalNode.write({
|
||||
// action: "known",
|
||||
// id: newContent.id,
|
||||
// header: false,
|
||||
// sessions: {},
|
||||
// isCorrection: true,
|
||||
// })
|
||||
// )
|
||||
// );
|
||||
async getBlockHandle(
|
||||
blockFile: BlockFilename,
|
||||
fs: FS,
|
||||
): Promise<{ handle: RH; size: number }> {
|
||||
if (!this.blockFileHandles.has(blockFile)) {
|
||||
this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
|
||||
}
|
||||
|
||||
return this.blockFileHandles.get(blockFile)!;
|
||||
}
|
||||
|
||||
async loadCoValue(id: RawCoID, fs: FS): Promise<CoValueChunk | undefined> {
|
||||
const files = this.fileCache || (await fs.listFiles());
|
||||
this.fileCache = files;
|
||||
const blockFiles = (
|
||||
files.filter((name) => name.startsWith("L")) as BlockFilename[]
|
||||
).sort();
|
||||
|
||||
let result;
|
||||
|
||||
for (const blockFile of blockFiles) {
|
||||
let cachedHeader:
|
||||
| { [id: RawCoID]: { start: number; length: number } }
|
||||
| undefined = this.headerCache.get(blockFile);
|
||||
|
||||
const { handle, size } = await this.getBlockHandle(blockFile, fs);
|
||||
|
||||
// console.log("Attempting to load", id, blockFile);
|
||||
|
||||
if (!cachedHeader) {
|
||||
cachedHeader = {};
|
||||
const header = await readHeader(blockFile, handle, size, fs);
|
||||
for (const entry of header) {
|
||||
cachedHeader[entry.id] = {
|
||||
start: entry.start,
|
||||
length: entry.length,
|
||||
};
|
||||
}
|
||||
|
||||
this.headerCache.set(blockFile, cachedHeader);
|
||||
}
|
||||
const headerEntry = cachedHeader[id];
|
||||
|
||||
// console.log("Header entry", id, headerEntry);
|
||||
|
||||
if (headerEntry) {
|
||||
const nextChunk = await readChunk(handle, headerEntry, fs);
|
||||
if (result) {
|
||||
const merged = mergeChunks(result, nextChunk);
|
||||
|
||||
if (merged === "nonContigous") {
|
||||
console.warn(
|
||||
"Incontiguous incoming update for " + newContent.id,
|
||||
"Non-contigous chunks while loading " + id,
|
||||
result,
|
||||
nextChunk,
|
||||
);
|
||||
return coValues;
|
||||
} else {
|
||||
result = merged;
|
||||
}
|
||||
} else {
|
||||
const merged = mergeChunks(coValue, newContentAsChunk);
|
||||
if (Either.isRight(merged)) {
|
||||
yield* Effect.logWarning(
|
||||
"Non-contigous new content for " + newContent.id,
|
||||
);
|
||||
|
||||
// yield* Effect.promise(() =>
|
||||
// this.toLocalNode.write({
|
||||
// action: "known",
|
||||
// ...chunkToKnownState(newContent.id, coValue),
|
||||
// isCorrection: true,
|
||||
// })
|
||||
// );
|
||||
|
||||
return coValues;
|
||||
} else {
|
||||
console.log("Appending to WAL", newContent.id);
|
||||
yield* this.withWAL((wal) =>
|
||||
writeToWal(
|
||||
wal,
|
||||
this.fs,
|
||||
newContent.id,
|
||||
newContentAsChunk,
|
||||
),
|
||||
);
|
||||
|
||||
return { ...coValues, [newContent.id]: merged.left };
|
||||
}
|
||||
result = nextChunk;
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
loadCoValue<WH, RH, FS extends FileSystem<WH, RH>>(
|
||||
id: RawCoID,
|
||||
fs: FS,
|
||||
): Effect.Effect<CoValueChunk | undefined, FSErr> {
|
||||
// return _loadChunkFromWal(id, fs);
|
||||
return Effect.gen(this, function* () {
|
||||
const files = this.fileCache || (yield* fs.listFiles());
|
||||
this.fileCache = files;
|
||||
const blockFiles = files.filter((name) =>
|
||||
name.startsWith("hash_"),
|
||||
) as BlockFilename[];
|
||||
|
||||
for (const blockFile of blockFiles) {
|
||||
let cachedHeader:
|
||||
| { [id: RawCoID]: { start: number; length: number } }
|
||||
| undefined = this.headerCache.get(blockFile);
|
||||
|
||||
const { handle, size } = yield* fs.openToRead(blockFile);
|
||||
|
||||
if (!cachedHeader) {
|
||||
cachedHeader = {};
|
||||
const header = yield* readHeader(
|
||||
blockFile,
|
||||
handle,
|
||||
size,
|
||||
fs,
|
||||
);
|
||||
for (const entry of header) {
|
||||
cachedHeader[entry.id] = {
|
||||
start: entry.start,
|
||||
length: entry.length,
|
||||
};
|
||||
}
|
||||
|
||||
this.headerCache.set(blockFile, cachedHeader);
|
||||
}
|
||||
const headerEntry = cachedHeader[id];
|
||||
|
||||
let result;
|
||||
if (headerEntry) {
|
||||
result = yield* readChunk(handle, headerEntry, fs);
|
||||
}
|
||||
|
||||
yield* fs.close(handle);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
});
|
||||
// await fs.close(handle);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async compact() {
|
||||
await Effect.runPromise(
|
||||
Effect.gen(this, function* () {
|
||||
const fileNames = yield* this.fs.listFiles();
|
||||
const fileNames = await this.fs.listFiles();
|
||||
|
||||
const walFiles = fileNames.filter((name) =>
|
||||
name.startsWith("wal-"),
|
||||
) as WalFilename[];
|
||||
walFiles.sort();
|
||||
const walFiles = fileNames.filter((name) =>
|
||||
name.startsWith("wal-"),
|
||||
) as WalFilename[];
|
||||
walFiles.sort();
|
||||
|
||||
const coValues = new Map<RawCoID, CoValueChunk>();
|
||||
|
||||
console.log("Compacting WAL files", walFiles);
|
||||
if (walFiles.length === 0) return;
|
||||
|
||||
const oldWal = this.currentWal;
|
||||
this.currentWal = undefined;
|
||||
|
||||
if (oldWal) {
|
||||
await this.fs.close(oldWal);
|
||||
}
|
||||
|
||||
for (const fileName of walFiles) {
|
||||
const { handle, size }: { handle: RH; size: number } =
|
||||
await this.fs.openToRead(fileName);
|
||||
if (size === 0) {
|
||||
await this.fs.close(handle);
|
||||
continue;
|
||||
}
|
||||
const bytes = await this.fs.read(handle, 0, size);
|
||||
|
||||
const decoded = textDecoder.decode(bytes);
|
||||
const lines = decoded.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.length === 0) continue;
|
||||
const chunk = JSON.parse(line) as WalEntry;
|
||||
|
||||
const existingChunk = coValues.get(chunk.id);
|
||||
|
||||
if (existingChunk) {
|
||||
const merged = mergeChunks(existingChunk, chunk);
|
||||
if (merged === "nonContigous") {
|
||||
console.log(
|
||||
"Non-contigous chunks in " +
|
||||
chunk.id +
|
||||
", " +
|
||||
fileName,
|
||||
existingChunk,
|
||||
chunk,
|
||||
);
|
||||
} else {
|
||||
coValues.set(chunk.id, merged);
|
||||
}
|
||||
} else {
|
||||
coValues.set(chunk.id, chunk);
|
||||
}
|
||||
}
|
||||
|
||||
await this.fs.close(handle);
|
||||
}
|
||||
|
||||
const highestBlockNumber = fileNames.reduce((acc, name) => {
|
||||
if (name.startsWith("L" + MAX_N_LEVELS)) {
|
||||
const num = parseInt(name.split("-")[1]!);
|
||||
if (num > acc) {
|
||||
return num;
|
||||
}
|
||||
}
|
||||
return acc;
|
||||
}, 0);
|
||||
|
||||
console.log([...coValues.keys()], fileNames, highestBlockNumber);
|
||||
|
||||
await writeBlock(
|
||||
coValues,
|
||||
MAX_N_LEVELS,
|
||||
highestBlockNumber + 1,
|
||||
this.fs,
|
||||
);
|
||||
|
||||
for (const walFile of walFiles) {
|
||||
await this.fs.removeFile(walFile);
|
||||
}
|
||||
this.fileCache = undefined;
|
||||
|
||||
const fileNames2 = await this.fs.listFiles();
|
||||
|
||||
const blockFiles = (
|
||||
fileNames2.filter((name) => name.startsWith("L")) as BlockFilename[]
|
||||
).sort();
|
||||
|
||||
const blockFilesByLevelInOrder: {
|
||||
[level: number]: BlockFilename[];
|
||||
} = {};
|
||||
|
||||
for (const blockFile of blockFiles) {
|
||||
const level = parseInt(blockFile.split("-")[0]!.slice(1));
|
||||
if (!blockFilesByLevelInOrder[level]) {
|
||||
blockFilesByLevelInOrder[level] = [];
|
||||
}
|
||||
blockFilesByLevelInOrder[level]!.push(blockFile);
|
||||
}
|
||||
|
||||
console.log(blockFilesByLevelInOrder);
|
||||
|
||||
for (let level = MAX_N_LEVELS; level > 0; level--) {
|
||||
const nBlocksDesired = Math.pow(2, level);
|
||||
const blocksInLevel = blockFilesByLevelInOrder[level];
|
||||
|
||||
if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
|
||||
console.log("Compacting blocks in level", level, blocksInLevel);
|
||||
|
||||
const coValues = new Map<RawCoID, CoValueChunk>();
|
||||
|
||||
console.log("Compacting WAL files", walFiles);
|
||||
if (walFiles.length === 0) return;
|
||||
for (const blockFile of blocksInLevel) {
|
||||
const { handle, size }: { handle: RH; size: number } =
|
||||
await this.getBlockHandle(blockFile, this.fs);
|
||||
|
||||
yield* SynchronizedRef.updateEffect(this.currentWal, (wal) =>
|
||||
Effect.gen(this, function* () {
|
||||
if (wal) {
|
||||
yield* this.fs.close(wal);
|
||||
}
|
||||
return undefined;
|
||||
}),
|
||||
);
|
||||
|
||||
for (const fileName of walFiles) {
|
||||
const { handle, size } =
|
||||
yield* this.fs.openToRead(fileName);
|
||||
if (size === 0) {
|
||||
yield* this.fs.close(handle);
|
||||
continue;
|
||||
}
|
||||
const bytes = yield* this.fs.read(handle, 0, size);
|
||||
const header = await readHeader(
|
||||
blockFile,
|
||||
handle,
|
||||
size,
|
||||
this.fs,
|
||||
);
|
||||
for (const entry of header) {
|
||||
const chunk = await readChunk(handle, entry, this.fs);
|
||||
|
||||
const decoded = textDecoder.decode(bytes);
|
||||
const lines = decoded.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.length === 0) continue;
|
||||
const chunk = JSON.parse(line) as WalEntry;
|
||||
|
||||
const existingChunk = coValues.get(chunk.id);
|
||||
const existingChunk = coValues.get(entry.id);
|
||||
|
||||
if (existingChunk) {
|
||||
const merged = mergeChunks(existingChunk, chunk);
|
||||
if (Either.isRight(merged)) {
|
||||
console.warn(
|
||||
if (merged === "nonContigous") {
|
||||
console.log(
|
||||
"Non-contigous chunks in " +
|
||||
chunk.id +
|
||||
entry.id +
|
||||
", " +
|
||||
fileName,
|
||||
blockFile,
|
||||
existingChunk,
|
||||
chunk,
|
||||
);
|
||||
} else {
|
||||
coValues.set(chunk.id, merged.left);
|
||||
coValues.set(entry.id, merged);
|
||||
}
|
||||
} else {
|
||||
coValues.set(chunk.id, chunk);
|
||||
coValues.set(entry.id, chunk);
|
||||
}
|
||||
}
|
||||
|
||||
yield* this.fs.close(handle);
|
||||
}
|
||||
|
||||
yield* writeBlock(coValues, 0, this.fs);
|
||||
for (const walFile of walFiles) {
|
||||
yield* this.fs.removeFile(walFile);
|
||||
let levelBelow = blockFilesByLevelInOrder[level - 1];
|
||||
if (!levelBelow) {
|
||||
levelBelow = [];
|
||||
blockFilesByLevelInOrder[level - 1] = levelBelow;
|
||||
}
|
||||
this.fileCache = undefined;
|
||||
}),
|
||||
|
||||
const highestBlockNumberInLevelBelow = levelBelow.reduce(
|
||||
(acc, name) => {
|
||||
const num = parseInt(name.split("-")[1]!);
|
||||
if (num > acc) {
|
||||
return num;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
0,
|
||||
);
|
||||
|
||||
const newBlockName = await writeBlock(
|
||||
coValues,
|
||||
level - 1,
|
||||
highestBlockNumberInLevelBelow + 1,
|
||||
this.fs,
|
||||
);
|
||||
levelBelow.push(newBlockName);
|
||||
|
||||
// delete blocks that went into this one
|
||||
for (const blockFile of blocksInLevel) {
|
||||
const handle = await this.getBlockHandle(
|
||||
blockFile,
|
||||
this.fs,
|
||||
);
|
||||
await this.fs.close(handle.handle);
|
||||
await this.fs.removeFile(blockFile);
|
||||
this.blockFileHandles.delete(blockFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setTimeout(
|
||||
() =>
|
||||
this.compact().catch((e) => {
|
||||
console.error("Error while compacting", e);
|
||||
}),
|
||||
5000,
|
||||
);
|
||||
|
||||
setTimeout(() => this.compact(), 5000);
|
||||
}
|
||||
|
||||
static asPeer<WH, RH, FS extends FileSystem<WH, RH>>({
|
||||
|
||||
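To make the level thresholds in compact() concrete, here is a small worked illustration assuming MAX_N_LEVELS = 3 as defined above: a level is merged down one level once it holds more than 2^level blocks, and freshly compacted WAL contents always enter at level 3.

for (let level = 3; level > 0; level--) {
    const nBlocksDesired = Math.pow(2, level);
    console.log(`level ${level}: merge down when more than ${nBlocksDesired} blocks`);
}
// level 3: merge down when more than 8 blocks  <- new blocks from the WAL start here
// level 2: merge down when more than 4 blocks
// level 1: merge down when more than 2 blocks  <- level 0 collects the oldest, largest blocks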
@@ -1,9 +1,6 @@
|
||||
import {
|
||||
ReadableStream,
|
||||
TransformStream,
|
||||
WritableStream,
|
||||
} from "isomorphic-streams";
|
||||
import { Peer, PeerID, SyncMessage } from "./sync.js";
|
||||
import { Channel } from "queueable";
|
||||
export { Channel } from "queueable";
|
||||
|
||||
export function connectedPeers(
|
||||
peer1id: PeerID,
|
||||
@@ -18,159 +15,56 @@ export function connectedPeers(
|
||||
peer2role?: Peer["role"];
|
||||
} = {},
|
||||
): [Peer, Peer] {
|
||||
const [inRx1, inTx1] = newStreamPair<SyncMessage>(peer1id + "_in");
|
||||
const [outRx1, outTx1] = newStreamPair<SyncMessage>(peer1id + "_out");
|
||||
|
||||
const [inRx2, inTx2] = newStreamPair<SyncMessage>(peer2id + "_in");
|
||||
const [outRx2, outTx2] = newStreamPair<SyncMessage>(peer2id + "_out");
|
||||
|
||||
void outRx2
|
||||
.pipeThrough(
|
||||
new TransformStream({
|
||||
transform(
|
||||
chunk: SyncMessage,
|
||||
controller: { enqueue: (msg: SyncMessage) => void },
|
||||
) {
|
||||
trace &&
|
||||
console.debug(
|
||||
`${peer2id} -> ${peer1id}`,
|
||||
JSON.stringify(
|
||||
chunk,
|
||||
(k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
2,
|
||||
),
|
||||
);
|
||||
controller.enqueue(chunk);
|
||||
},
|
||||
}),
|
||||
)
|
||||
.pipeTo(inTx1);
|
||||
|
||||
void outRx1
|
||||
.pipeThrough(
|
||||
new TransformStream({
|
||||
transform(
|
||||
chunk: SyncMessage,
|
||||
controller: { enqueue: (msg: SyncMessage) => void },
|
||||
) {
|
||||
trace &&
|
||||
console.debug(
|
||||
`${peer1id} -> ${peer2id}`,
|
||||
JSON.stringify(
|
||||
chunk,
|
||||
(k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
2,
|
||||
),
|
||||
);
|
||||
controller.enqueue(chunk);
|
||||
},
|
||||
}),
|
||||
)
|
||||
.pipeTo(inTx2);
|
||||
const [from1to2Rx, from1to2Tx] = newQueuePair(
|
||||
trace ? { traceAs: `${peer1id} -> ${peer2id}` } : undefined,
|
||||
);
|
||||
const [from2to1Rx, from2to1Tx] = newQueuePair(
|
||||
trace ? { traceAs: `${peer2id} -> ${peer1id}` } : undefined,
|
||||
);
|
||||
|
||||
const peer2AsPeer: Peer = {
|
||||
id: peer2id,
|
||||
incoming: inRx1,
|
||||
outgoing: outTx1,
|
||||
incoming: from2to1Rx,
|
||||
outgoing: from1to2Tx,
|
||||
role: peer2role,
|
||||
};
|
||||
|
||||
const peer1AsPeer: Peer = {
|
||||
id: peer1id,
|
||||
incoming: inRx2,
|
||||
outgoing: outTx2,
|
||||
incoming: from1to2Rx,
|
||||
outgoing: from2to1Tx,
|
||||
role: peer1role,
|
||||
};
|
||||
|
||||
return [peer1AsPeer, peer2AsPeer];
|
||||
}
|
||||
|
||||
export function newStreamPair<T>(
|
||||
pairName?: string,
|
||||
): [ReadableStream<T>, WritableStream<T>] {
|
||||
let queueLength = 0;
|
||||
let readerClosed = false;
|
||||
export function newQueuePair(
|
||||
options: { traceAs?: string } = {},
|
||||
): [AsyncIterable<SyncMessage>, Channel<SyncMessage>] {
|
||||
const channel = new Channel<SyncMessage>();
|
||||
|
||||
let resolveEnqueue: (enqueue: (item: T) => void) => void;
|
||||
const enqueuePromise = new Promise<(item: T) => void>((resolve) => {
|
||||
resolveEnqueue = resolve;
|
||||
});
|
||||
|
||||
let resolveClose: (close: () => void) => void;
|
||||
const closePromise = new Promise<() => void>((resolve) => {
|
||||
resolveClose = resolve;
|
||||
});
|
||||
|
||||
let queueWasOverflowing = false;
|
||||
|
||||
function maybeReportQueueLength() {
|
||||
if (queueLength >= 100) {
|
||||
queueWasOverflowing = true;
|
||||
if (queueLength % 100 === 0) {
|
||||
console.warn(pairName, "overflowing queue length", queueLength);
|
||||
}
|
||||
} else {
|
||||
if (queueWasOverflowing) {
|
||||
console.debug(pairName, "ok queue length", queueLength);
|
||||
queueWasOverflowing = false;
|
||||
}
|
||||
}
|
||||
if (options.traceAs) {
|
||||
return [
|
||||
(async function* () {
|
||||
for await (const msg of channel) {
|
||||
console.debug(
|
||||
options.traceAs,
|
||||
JSON.stringify(
|
||||
msg,
|
||||
(k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
2,
|
||||
),
|
||||
);
|
||||
yield msg;
|
||||
}
|
||||
})(),
|
||||
channel,
|
||||
];
|
||||
} else {
|
||||
return [channel.wrap(), channel];
|
||||
}
|
||||
|
||||
const readable = new ReadableStream<T>({
|
||||
async start(controller) {
|
||||
resolveEnqueue(controller.enqueue.bind(controller));
|
||||
resolveClose(controller.close.bind(controller));
|
||||
},
|
||||
|
||||
cancel(_reason) {
|
||||
console.log("Manually closing reader");
|
||||
readerClosed = true;
|
||||
},
|
||||
}).pipeThrough(
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
new TransformStream<any, any>({
|
||||
transform(
|
||||
chunk: SyncMessage,
|
||||
controller: { enqueue: (msg: SyncMessage) => void },
|
||||
) {
|
||||
queueLength -= 1;
|
||||
maybeReportQueueLength();
|
||||
controller.enqueue(chunk);
|
||||
},
|
||||
}),
|
||||
) as ReadableStream<T>;
|
||||
|
||||
let lastWritePromise = Promise.resolve();
|
||||
|
||||
const writable = new WritableStream<T>({
|
||||
async write(chunk) {
|
||||
queueLength += 1;
|
||||
maybeReportQueueLength();
|
||||
const enqueue = await enqueuePromise;
|
||||
if (readerClosed) {
|
||||
throw new Error("Reader closed");
|
||||
} else {
|
||||
// make sure write resolves before corresponding read, but make sure writes are still in order
|
||||
await lastWritePromise;
|
||||
lastWritePromise = new Promise((resolve) => {
|
||||
enqueue(chunk);
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
},
|
||||
async abort(reason) {
|
||||
console.debug("Manually closing writer", reason);
|
||||
const close = await closePromise;
|
||||
close();
|
||||
},
|
||||
});
|
||||
|
||||
return [readable, writable];
|
||||
}
|
||||
|
||||
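Usage mirrors the account test further down: the queue-backed pair can be handed straight to two nodes' sync managers. The node1/node2 variables and the destructured names here are assumptions for illustration.

const [node1asPeer, node2asPeer] = connectedPeers("node1", "node2", {
    trace: true,
    peer1role: "server",
    peer2role: "client",
});
// Each node registers the peer object representing the other side:
node1.syncManager.addPeer(node2asPeer);
node2.syncManager.addPeer(node1asPeer);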
@@ -1,12 +1,7 @@
|
||||
import { Signature } from "./crypto/crypto.js";
|
||||
import { CoValueHeader, Transaction } from "./coValueCore.js";
|
||||
import { CoValueCore } from "./coValueCore.js";
|
||||
import { LocalNode } from "./localNode.js";
|
||||
import {
|
||||
ReadableStream,
|
||||
WritableStream,
|
||||
WritableStreamDefaultWriter,
|
||||
} from "isomorphic-streams";
|
||||
import { LocalNode, newLoadingState } from "./localNode.js";
|
||||
import { RawCoID, SessionID } from "./ids.js";
|
||||
|
||||
export type CoValueKnownState = {
|
||||
@@ -60,10 +55,22 @@ export type DoneMessage = {
|
||||
|
||||
export type PeerID = string;
|
||||
|
||||
export type DisconnectedError = "Disconnected";
|
||||
|
||||
export type PingTimeoutError = "PingTimeout";
|
||||
|
||||
export type IncomingSyncStream = AsyncIterable<
|
||||
SyncMessage | DisconnectedError | PingTimeoutError
|
||||
>;
|
||||
export type OutgoingSyncQueue = {
|
||||
push: (msg: SyncMessage) => Promise<unknown>;
|
||||
close: () => void;
|
||||
};
|
||||
|
||||
export interface Peer {
|
||||
id: PeerID;
|
||||
incoming: ReadableStream<SyncMessage>;
|
||||
outgoing: WritableStream<SyncMessage>;
|
||||
incoming: IncomingSyncStream;
|
||||
outgoing: OutgoingSyncQueue;
|
||||
role: "peer" | "server" | "client";
|
||||
delayOnError?: number;
|
||||
priority?: number;
|
||||
@@ -73,8 +80,8 @@ export interface PeerState {
|
||||
id: PeerID;
|
||||
optimisticKnownStates: { [id: RawCoID]: CoValueKnownState };
|
||||
toldKnownState: Set<RawCoID>;
|
||||
incoming: ReadableStream<SyncMessage>;
|
||||
outgoing: WritableStreamDefaultWriter<SyncMessage>;
|
||||
incoming: IncomingSyncStream;
|
||||
outgoing: OutgoingSyncQueue;
|
||||
role: "peer" | "server" | "client";
|
||||
delayOnError?: number;
|
||||
priority?: number;
|
||||
@@ -127,25 +134,20 @@ export class SyncManager {
|
||||
});
|
||||
}
|
||||
|
||||
async loadFromPeers(id: RawCoID, excludePeer?: PeerID) {
|
||||
for (const peer of this.peersInPriorityOrder()) {
|
||||
if (peer.id === excludePeer) {
|
||||
continue;
|
||||
}
|
||||
if (peer.role !== "server") {
|
||||
continue;
|
||||
}
|
||||
async loadFromPeers(id: RawCoID, forPeer?: PeerID) {
|
||||
const eligiblePeers = this.peersInPriorityOrder().filter(
|
||||
(peer) => peer.id !== forPeer && peer.role === "server",
|
||||
);
|
||||
|
||||
for (const peer of eligiblePeers) {
|
||||
// console.log("loading", id, "from", peer.id);
|
||||
peer.outgoing
|
||||
.write({
|
||||
action: "load",
|
||||
id: id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error("Error writing to peer", e);
|
||||
});
|
||||
await peer.outgoing.push({
|
||||
action: "load",
|
||||
id: id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
});
|
||||
|
||||
const coValueEntry = this.local.coValues[id];
|
||||
if (coValueEntry?.state !== "loading") {
|
||||
continue;
|
||||
@@ -220,11 +222,13 @@ export class SyncManager {
|
||||
}
|
||||
|
||||
if (entry.state === "loading") {
|
||||
await this.trySendToPeer(peer, {
|
||||
this.trySendToPeer(peer, {
|
||||
action: "load",
|
||||
id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
}).catch((e) => {
|
||||
console.error("Error sending load", e);
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -237,9 +241,11 @@ export class SyncManager {
|
||||
|
||||
if (!peer.toldKnownState.has(id)) {
|
||||
peer.toldKnownState.add(id);
|
||||
await this.trySendToPeer(peer, {
|
||||
this.trySendToPeer(peer, {
|
||||
action: "load",
|
||||
...coValue.knownState(),
|
||||
}).catch((e) => {
|
||||
console.error("Error sending load", e);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -264,10 +270,12 @@ export class SyncManager {
|
||||
);
|
||||
|
||||
if (!peer.toldKnownState.has(id)) {
|
||||
await this.trySendToPeer(peer, {
|
||||
this.trySendToPeer(peer, {
|
||||
action: "known",
|
||||
asDependencyOf,
|
||||
...coValue.knownState(),
|
||||
}).catch((e) => {
|
||||
console.error("Error sending known state", e);
|
||||
});
|
||||
|
||||
peer.toldKnownState.add(id);
|
||||
@@ -297,10 +305,14 @@ export class SyncManager {
|
||||
let lastYield = performance.now();
|
||||
for (const [_i, piece] of newContentPieces.entries()) {
|
||||
// console.log(
|
||||
// `${id} -> ${peer.id}: Sending content piece ${i + 1}/${newContentPieces.length} header: ${!!piece.header}`,
|
||||
// `${id} -> ${peer.id}: Sending content piece ${i + 1}/${
|
||||
// newContentPieces.length
|
||||
// } header: ${!!piece.header}`,
|
||||
// // Object.values(piece.new).map((s) => s.newTransactions)
|
||||
// );
|
||||
await this.trySendToPeer(peer, piece);
|
||||
this.trySendToPeer(peer, piece).catch((e) => {
|
||||
console.error("Error sending content piece", e);
|
||||
});
|
||||
if (performance.now() - lastYield > 10) {
|
||||
await new Promise<void>((resolve) => {
|
||||
setTimeout(resolve, 0);
|
||||
@@ -328,7 +340,7 @@ export class SyncManager {
|
||||
id: peer.id,
|
||||
optimisticKnownStates: {},
|
||||
incoming: peer.incoming,
|
||||
outgoing: peer.outgoing.getWriter(),
|
||||
outgoing: peer.outgoing,
|
||||
toldKnownState: new Set(),
|
||||
role: peer.role,
|
||||
delayOnError: peer.delayOnError,
|
||||
@@ -354,91 +366,39 @@ export class SyncManager {
|
||||
void initialSync();
|
||||
}
|
||||
|
||||
const readIncoming = async () => {
|
||||
try {
|
||||
for await (const msg of peerState.incoming) {
|
||||
try {
|
||||
// await this.handleSyncMessage(msg, peerState);
|
||||
this.handleSyncMessage(msg, peerState).catch((e) => {
|
||||
console.error(
|
||||
new Date(),
|
||||
`Error reading from peer ${peer.id}, handling msg`,
|
||||
JSON.stringify(msg, (k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
),
|
||||
e,
|
||||
);
|
||||
});
|
||||
// await new Promise<void>((resolve) => {
|
||||
// setTimeout(resolve, 0);
|
||||
// });
|
||||
} catch (e) {
|
||||
console.error(
|
||||
new Date(),
|
||||
`Error reading from peer ${peer.id}, handling msg`,
|
||||
JSON.stringify(msg, (k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
),
|
||||
e,
|
||||
);
|
||||
if (peerState.delayOnError) {
|
||||
await new Promise<void>((resolve) => {
|
||||
setTimeout(resolve, peerState.delayOnError);
|
||||
});
|
||||
}
|
||||
}
|
||||
const processMessages = async () => {
|
||||
for await (const msg of peerState.incoming) {
|
||||
if (msg === "Disconnected") {
|
||||
return;
|
||||
}
|
||||
if (msg === "PingTimeout") {
|
||||
console.error("Ping timeout from peer", peer.id);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await this.handleSyncMessage(msg, peerState);
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Error reading from peer ${
|
||||
peer.id
|
||||
}, handling msg\n\n${JSON.stringify(msg, (k, v) =>
|
||||
k === "changes" || k === "encryptedChanges"
|
||||
? v.slice(0, 20) + "..."
|
||||
: v,
|
||||
)}`,
|
||||
{ cause: e },
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`Error reading from peer ${peer.id}`, e);
|
||||
}
|
||||
|
||||
console.log("Peer disconnected:", peer.id);
|
||||
delete this.peers[peer.id];
|
||||
};
|
||||
|
||||
void readIncoming();
|
||||
processMessages().catch((e) => {
|
||||
console.error("Error processing messages from peer", peer.id, e);
|
||||
});
|
||||
}
|
||||
|
||||
trySendToPeer(peer: PeerState, msg: SyncMessage) {
|
||||
if (!this.peers[peer.id]) {
|
||||
// already disconnected, return to drain potential queue
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve) => {
|
||||
const start = Date.now();
|
||||
peer.outgoing
|
||||
.write(msg)
|
||||
.then(() => {
|
||||
const end = Date.now();
|
||||
if (end - start > 1000) {
|
||||
// console.error(
|
||||
// new Error(
|
||||
// `Writing to peer "${peer.id}" took ${
|
||||
// Math.round((Date.now() - start) / 100) / 10
|
||||
// }s - this should never happen as write should resolve quickly or error`
|
||||
// )
|
||||
// );
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error(
|
||||
new Error(
|
||||
`Error writing to peer ${peer.id}, disconnecting`,
|
||||
{
|
||||
cause: e,
|
||||
},
|
||||
),
|
||||
);
|
||||
delete this.peers[peer.id];
|
||||
});
|
||||
});
|
||||
return peer.outgoing.push(msg);
|
||||
}
|
||||
|
||||
async handleLoad(msg: LoadMessage, peer: PeerState) {
|
||||
@@ -447,30 +407,61 @@ export class SyncManager {
|
||||
|
||||
if (!entry) {
|
||||
// console.log(`Loading ${msg.id} from all peers except ${peer.id}`);
|
||||
this.local
|
||||
.loadCoValueCore(msg.id, {
|
||||
dontLoadFrom: peer.id,
|
||||
dontWaitFor: peer.id,
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error("Error loading coValue in handleLoad", e);
|
||||
});
|
||||
|
||||
// special case: we should be able to solve this much more neatly
|
||||
// with an explicit state machine in the future
|
||||
const eligiblePeers = this.peersInPriorityOrder().filter(
|
||||
(other) => other.id !== peer.id && other.role === "server",
|
||||
);
|
||||
if (eligiblePeers.length === 0) {
|
||||
if (msg.header || Object.keys(msg.sessions).length > 0) {
|
||||
this.local.coValues[msg.id] = newLoadingState(
|
||||
new Set([peer.id]),
|
||||
);
|
||||
this.trySendToPeer(peer, {
|
||||
action: "known",
|
||||
id: msg.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
}).catch((e) => {
|
||||
console.error("Error sending known state", e);
|
||||
});
|
||||
}
|
||||
return;
|
||||
} else {
|
||||
this.local
|
||||
.loadCoValueCore(msg.id, {
|
||||
dontLoadFrom: peer.id,
|
||||
dontWaitFor: peer.id,
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error("Error loading coValue in handleLoad", e);
|
||||
});
|
||||
}
|
||||
|
||||
entry = this.local.coValues[msg.id]!;
|
||||
}
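In the new branch above, when a load arrives for a coValue the node doesn't have and there are no other server peers to ask, the node records this peer as the only known source and answers with an empty `known` message. A hedged sketch of that reply shape (field names mirror the messages visible in this diff; the authoritative `SyncMessage` types live in cojson's sync.ts):

```ts
// Sketch: build the empty "known" reply sent back when a coValue
// cannot be served from anywhere else.
type CoValueId = string; // placeholder for cojson's co-value ID type (assumption)

function emptyKnownReply(id: CoValueId) {
  return {
    action: "known" as const,
    id,
    header: false, // we don't even have the header...
    sessions: {},  // ...so there are no sessions/transactions to report
  };
}
```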
|
||||
|
||||
if (entry.state === "loading") {
|
||||
console.log(
|
||||
"Waiting for loaded",
|
||||
msg.id,
|
||||
"after message from",
|
||||
peer.id,
|
||||
);
|
||||
const loaded = await entry.done;
|
||||
|
||||
console.log("Loaded", msg.id, loaded);
|
||||
if (loaded === "unavailable") {
|
||||
peer.optimisticKnownStates[msg.id] = knownStateIn(msg);
|
||||
peer.toldKnownState.add(msg.id);
|
||||
|
||||
await this.trySendToPeer(peer, {
|
||||
this.trySendToPeer(peer, {
|
||||
action: "known",
|
||||
id: msg.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
}).catch((e) => {
|
||||
console.error("Error sending known state back", e);
|
||||
});
|
||||
|
||||
return;
|
||||
@@ -508,7 +499,7 @@ export class SyncManager {
|
||||
}
|
||||
} else {
|
||||
throw new Error(
|
||||
"Expected coValue entry to be created, missing subscribe?",
|
||||
`Expected coValue entry for ${msg.id} to be created on known state, missing subscribe?`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -549,7 +540,7 @@ export class SyncManager {
|
||||
|
||||
if (!entry) {
|
||||
throw new Error(
|
||||
"Expected coValue entry to be created, missing subscribe?",
|
||||
`Expected coValue entry for ${msg.id} to be created on new content, missing subscribe?`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -680,10 +671,12 @@ export class SyncManager {
|
||||
await this.syncCoValue(coValue);
|
||||
|
||||
if (invalidStateAssumed) {
|
||||
await this.trySendToPeer(peer, {
|
||||
this.trySendToPeer(peer, {
|
||||
action: "known",
|
||||
isCorrection: true,
|
||||
...coValue.knownState(),
|
||||
}).catch((e) => {
|
||||
console.error("Error sending known state correction", e);
|
||||
});
|
||||
}
|
||||
}
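The correction path above now fires `trySendToPeer` without awaiting it, logging send failures instead of letting them block handling of new content. A hedged sketch of that fire-and-forget pattern as a standalone helper (the helper itself is an assumption; the diff inlines it):

```ts
// Sketch: detach a send so a slow or broken peer cannot stall the caller;
// failures are only logged, mirroring the .catch() calls added in this diff.
function sendWithoutBlocking(
  send: () => Promise<void>,
  context: string,
): void {
  send().catch((e) => {
    console.error(`Error ${context}`, e);
  });
}

// usage, roughly mirroring the correction branch above (illustrative only):
// sendWithoutBlocking(
//   () => syncManager.trySendToPeer(peer, { action: "known", isCorrection: true, ...coValue.knownState() }),
//   "sending known state correction",
// );
```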
|
||||
@@ -751,6 +744,12 @@ export class SyncManager {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gracefulShutdown() {
|
||||
for (const peer of Object.values(this.peers)) {
|
||||
peer.outgoing.close();
|
||||
}
|
||||
}
|
||||
}
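`gracefulShutdown` is newly added here and simply closes every peer's outgoing queue; the jazz-browser change further down replaces its hand-rolled peer-closing loop with a call to it. A usage sketch (how you obtain the node is an assumption and depends on your setup):

```ts
// Sketch: shutting a node down cleanly when a tab or worker exits.
// node.syncManager.gracefulShutdown() is the API added in this diff.
function shutdown(node: { syncManager: { gracefulShutdown(): void } }) {
  // closes each peer's outgoing queue so writers stop and readers see the end
  node.syncManager.gracefulShutdown();
}
```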
|
||||
|
||||
function knownStateIn(msg: LoadMessage | KnownStateMessage) {
|
||||
|
||||
@@ -56,7 +56,9 @@ test("Can create account with one node, and then load it on another", async () =
|
||||
trace: true,
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
});
|
||||
})
|
||||
|
||||
console.log("After connected peers");
|
||||
|
||||
node.syncManager.addPeer(node2asPeer);
|
||||
|
||||
|
||||
@@ -1,18 +1,15 @@
|
||||
import { expect, test } from "vitest";
|
||||
import { newRandomSessionID } from "../coValueCore.js";
|
||||
import { LocalNode } from "../localNode.js";
|
||||
import { SyncMessage } from "../sync.js";
|
||||
import { expectMap } from "../coValue.js";
|
||||
import { MapOpPayload } from "../coValues/coMap.js";
|
||||
import { RawGroup } from "../coValues/group.js";
|
||||
import {
|
||||
randomAnonymousAccountAndSessionID,
|
||||
shouldNotResolve,
|
||||
} from "./testUtils.js";
|
||||
import { connectedPeers, newStreamPair } from "../streamUtils.js";
|
||||
import { randomAnonymousAccountAndSessionID } from "./testUtils.js";
|
||||
import { connectedPeers, newQueuePair } from "../streamUtils.js";
|
||||
import { AccountID } from "../coValues/account.js";
|
||||
import { stableStringify } from "../jsonStringify.js";
|
||||
import { WasmCrypto } from "../crypto/WasmCrypto.js";
|
||||
import { expectMap } from "../coValue.js";
|
||||
import { newRandomSessionID } from "../coValueCore.js";
|
||||
|
||||
const Crypto = await WasmCrypto.create();
|
||||
|
||||
@@ -26,8 +23,9 @@ test("Node replies with initial tx and header to empty subscribe", async () => {
|
||||
|
||||
map.set("hello", "world", "trusting");
|
||||
|
||||
const [inRx, inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -36,32 +34,28 @@ test("Node replies with initial tx and header to empty subscribe", async () => {
|
||||
role: "peer",
|
||||
});
|
||||
|
||||
const writer = inTx.getWriter();
|
||||
|
||||
await writer.write({
|
||||
await inTx.push({
|
||||
action: "load",
|
||||
id: map.core.id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
const mapTellKnownStateMsg = await reader.read();
|
||||
expect(mapTellKnownStateMsg.value).toEqual({
|
||||
const mapTellKnownStateMsg = (await outRxQ.next()).value;
|
||||
expect(mapTellKnownStateMsg).toEqual({
|
||||
action: "known",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupContentEx(group));
|
||||
|
||||
const newContentMsg = await reader.read();
|
||||
const newContentMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(newContentMsg.value).toEqual({
|
||||
expect(newContentMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: {
|
||||
@@ -106,8 +100,9 @@ test("Node replies with only new tx to subscribe with some known state", async (
|
||||
map.set("hello", "world", "trusting");
|
||||
map.set("goodbye", "world", "trusting");
|
||||
|
||||
const [inRx, inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -116,9 +111,7 @@ test("Node replies with only new tx to subscribe with some known state", async (
|
||||
role: "peer",
|
||||
});
|
||||
|
||||
const writer = inTx.getWriter();
|
||||
|
||||
await writer.write({
|
||||
await inTx.push({
|
||||
action: "load",
|
||||
id: map.core.id,
|
||||
header: true,
|
||||
@@ -127,23 +120,21 @@ test("Node replies with only new tx to subscribe with some known state", async (
|
||||
},
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
const mapTellKnownStateMsg = await reader.read();
|
||||
expect(mapTellKnownStateMsg.value).toEqual({
|
||||
const mapTellKnownStateMsg = (await outRxQ.next()).value;
|
||||
expect(mapTellKnownStateMsg).toEqual({
|
||||
action: "known",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupContentEx(group));
|
||||
|
||||
const mapNewContentMsg = await reader.read();
|
||||
const mapNewContentMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapNewContentMsg.value).toEqual({
|
||||
expect(mapNewContentMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: undefined,
|
||||
@@ -170,7 +161,6 @@ test("Node replies with only new tx to subscribe with some known state", async (
|
||||
},
|
||||
} satisfies SyncMessage);
|
||||
});
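These test hunks all make the same mechanical change: the Web-Streams `getReader()`/`reader.read()` pairs become a `newQueuePair()` plus the queue's async iterator, and assertions switch from `msg.value` to the value pulled off the iterator. A hedged sketch of the reading pattern the updated tests use (only the iteration surface matters; the queue type itself is assumed):

```ts
// Sketch: how the updated tests consume outgoing sync messages.
// Assumes the receiving end is AsyncIterable, as the
// outRx[Symbol.asyncIterator]() calls in this diff suggest.
async function nextMessage<T>(queue: AsyncIterable<T>): Promise<T | undefined> {
  const iterator = queue[Symbol.asyncIterator]();
  const { value, done } = await iterator.next();
  return done ? undefined : value;
}

// In the tests the iterator is created once and reused:
//   const outRxQ = outRx[Symbol.asyncIterator]();
//   const msg = (await outRxQ.next()).value;
```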
|
||||
|
||||
test.todo(
|
||||
"TODO: node only replies with new tx to subscribe with some known state, even in the depended on coValues",
|
||||
);
|
||||
@@ -183,8 +173,9 @@ test("After subscribing, node sends own known state and new txs to peer", async
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const [inRx, inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -193,9 +184,7 @@ test("After subscribing, node sends own known state and new txs to peer", async
|
||||
role: "peer",
|
||||
});
|
||||
|
||||
const writer = inTx.getWriter();
|
||||
|
||||
await writer.write({
|
||||
await inTx.push({
|
||||
action: "load",
|
||||
id: map.core.id,
|
||||
header: false,
|
||||
@@ -204,23 +193,21 @@ test("After subscribing, node sends own known state and new txs to peer", async
|
||||
},
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
const mapTellKnownStateMsg = await reader.read();
|
||||
expect(mapTellKnownStateMsg.value).toEqual({
|
||||
const mapTellKnownStateMsg = (await outRxQ.next()).value;
|
||||
expect(mapTellKnownStateMsg).toEqual({
|
||||
action: "known",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupContentEx(group));
|
||||
|
||||
const mapNewContentHeaderOnlyMsg = await reader.read();
|
||||
const mapNewContentHeaderOnlyMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapNewContentHeaderOnlyMsg.value).toEqual({
|
||||
expect(mapNewContentHeaderOnlyMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: map.core.header,
|
||||
@@ -229,9 +216,9 @@ test("After subscribing, node sends own known state and new txs to peer", async
|
||||
|
||||
map.set("hello", "world", "trusting");
|
||||
|
||||
const mapEditMsg1 = await reader.read();
|
||||
const mapEditMsg1 = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapEditMsg1.value).toEqual({
|
||||
expect(mapEditMsg1).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
new: {
|
||||
@@ -259,9 +246,9 @@ test("After subscribing, node sends own known state and new txs to peer", async
|
||||
|
||||
map.set("goodbye", "world", "trusting");
|
||||
|
||||
const mapEditMsg2 = await reader.read();
|
||||
const mapEditMsg2 = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapEditMsg2.value).toEqual({
|
||||
expect(mapEditMsg2).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
new: {
|
||||
@@ -298,8 +285,9 @@ test("Client replies with known new content to tellKnownState from server", asyn
|
||||
|
||||
map.set("hello", "world", "trusting");
|
||||
|
||||
const [inRx, inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -308,13 +296,9 @@ test("Client replies with known new content to tellKnownState from server", asyn
|
||||
role: "peer",
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await outRxQ.next()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
const writer = inTx.getWriter();
|
||||
|
||||
await writer.write({
|
||||
await inTx.push({
|
||||
action: "known",
|
||||
id: map.core.id,
|
||||
header: false,
|
||||
@@ -323,21 +307,21 @@ test("Client replies with known new content to tellKnownState from server", asyn
|
||||
},
|
||||
});
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupStateEx(group));
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
const mapTellKnownStateMsg = await reader.read();
|
||||
expect(mapTellKnownStateMsg.value).toEqual({
|
||||
const mapTellKnownStateMsg = (await outRxQ.next()).value;
|
||||
expect(mapTellKnownStateMsg).toEqual({
|
||||
action: "known",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupContentEx(group));
|
||||
|
||||
const mapNewContentMsg = await reader.read();
|
||||
const mapNewContentMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapNewContentMsg.value).toEqual({
|
||||
expect(mapNewContentMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: map.core.header,
|
||||
@@ -373,8 +357,9 @@ test("No matter the optimistic known state, node respects invalid known state me
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const [inRx, inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -383,9 +368,7 @@ test("No matter the optimistic known state, node respects invalid known state me
|
||||
role: "peer",
|
||||
});
|
||||
|
||||
const writer = inTx.getWriter();
|
||||
|
||||
await writer.write({
|
||||
await inTx.push({
|
||||
action: "load",
|
||||
id: map.core.id,
|
||||
header: false,
|
||||
@@ -394,23 +377,21 @@ test("No matter the optimistic known state, node respects invalid known state me
|
||||
},
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupStateEx(group));
|
||||
|
||||
const mapTellKnownStateMsg = await reader.read();
|
||||
expect(mapTellKnownStateMsg.value).toEqual({
|
||||
const mapTellKnownStateMsg = (await outRxQ.next()).value;
|
||||
expect(mapTellKnownStateMsg).toEqual({
|
||||
action: "known",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupContentEx(group));
|
||||
|
||||
const mapNewContentHeaderOnlyMsg = await reader.read();
|
||||
const mapNewContentHeaderOnlyMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapNewContentHeaderOnlyMsg.value).toEqual({
|
||||
expect(mapNewContentHeaderOnlyMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: map.core.header,
|
||||
@@ -421,11 +402,11 @@ test("No matter the optimistic known state, node respects invalid known state me
|
||||
|
||||
map.set("goodbye", "world", "trusting");
|
||||
|
||||
const _mapEditMsgs = await reader.read();
|
||||
const _mapEditMsgs = (await outRxQ.next()).value;
|
||||
|
||||
console.log("Sending correction");
|
||||
|
||||
await writer.write({
|
||||
await inTx.push({
|
||||
action: "known",
|
||||
isCorrection: true,
|
||||
id: map.core.id,
|
||||
@@ -435,9 +416,9 @@ test("No matter the optimistic known state, node respects invalid known state me
|
||||
},
|
||||
} satisfies SyncMessage);
|
||||
|
||||
const newContentAfterWrongAssumedState = await reader.read();
|
||||
const newContentAfterWrongAssumedState = (await outRxQ.next()).value;
|
||||
|
||||
expect(newContentAfterWrongAssumedState.value).toEqual({
|
||||
expect(newContentAfterWrongAssumedState).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: undefined,
|
||||
@@ -473,8 +454,9 @@ test("If we add a peer, but it never subscribes to a coValue, it won't get any m
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const [inRx, _inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, _inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -485,11 +467,16 @@ test("If we add a peer, but it never subscribes to a coValue, it won't get any m
|
||||
|
||||
map.set("hello", "world", "trusting");
|
||||
|
||||
const reader = outRx.getReader();
|
||||
const timeoutPromise = new Promise((resolve) =>
|
||||
setTimeout(() => resolve("neverHappened"), 100),
|
||||
);
|
||||
|
||||
await expect(
|
||||
shouldNotResolve(reader.read(), { timeout: 100 }),
|
||||
).resolves.toBeUndefined();
|
||||
const result = await Promise.race([
|
||||
outRxQ.next().then((value) => value.value),
|
||||
timeoutPromise,
|
||||
]);
|
||||
|
||||
expect(result).toEqual("neverHappened");
|
||||
});
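The old version of this test used a `shouldNotResolve` helper; the new version inlines the same idea as a `Promise.race` against a timeout sentinel. A small hedged sketch of that pattern as a reusable helper (helper name and default timeout are my own):

```ts
// Sketch: assert that a promise does not settle within a time budget by
// racing it against a timeout that resolves to a sentinel value.
async function expectNoResolution<T>(
  promise: Promise<T>,
  timeoutMs = 100,
): Promise<"neverHappened" | T> {
  const timeout = new Promise<"neverHappened">((resolve) =>
    setTimeout(() => resolve("neverHappened"), timeoutMs),
  );
  return Promise.race([promise, timeout]);
}

// usage in a test:
//   expect(await expectNoResolution(outRxQ.next().then((r) => r.value))).toEqual("neverHappened");
```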
|
||||
|
||||
test.todo(
|
||||
@@ -502,8 +489,9 @@ test.todo(
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const [inRx, _inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, _inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -512,19 +500,18 @@ test.todo(
|
||||
role: "server",
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await reader.read()).value).toMatchObject({
|
||||
// expect((await outRxQ.next()).value).toMatchObject({
|
||||
// action: "load",
|
||||
// id: adminID,
|
||||
// });
|
||||
expect((await reader.read()).value).toMatchObject({
|
||||
expect((await outRxQ.next()).value).toMatchObject({
|
||||
action: "load",
|
||||
id: group.core.id,
|
||||
});
|
||||
|
||||
const mapSubscribeMsg = await reader.read();
|
||||
const mapSubscribeMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapSubscribeMsg.value).toEqual({
|
||||
expect(mapSubscribeMsg).toEqual({
|
||||
action: "load",
|
||||
id: map.core.id,
|
||||
header: true,
|
||||
@@ -533,14 +520,14 @@ test.todo(
|
||||
|
||||
map.set("hello", "world", "trusting");
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(
|
||||
groupContentEx(group),
|
||||
);
|
||||
|
||||
const mapNewContentMsg = await reader.read();
|
||||
const mapNewContentMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapNewContentMsg.value).toEqual({
|
||||
expect(mapNewContentMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: map.core.header,
|
||||
@@ -577,8 +564,9 @@ test.skip("If we add a server peer, newly created coValues are auto-subscribed t
|
||||
|
||||
const group = node.createGroup();
|
||||
|
||||
const [inRx, _inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, _inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -587,31 +575,30 @@ test.skip("If we add a server peer, newly created coValues are auto-subscribed t
|
||||
role: "server",
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await reader.read()).value).toMatchObject({
|
||||
// expect((await outRxQ.next()).value).toMatchObject({
|
||||
// action: "load",
|
||||
// id: admin.id,
|
||||
// });
|
||||
expect((await reader.read()).value).toMatchObject({
|
||||
expect((await outRxQ.next()).value).toMatchObject({
|
||||
action: "load",
|
||||
id: group.core.id,
|
||||
});
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const mapSubscribeMsg = await reader.read();
|
||||
const mapSubscribeMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapSubscribeMsg.value).toEqual({
|
||||
expect(mapSubscribeMsg).toEqual({
|
||||
action: "load",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(adminID));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admContEx(adminID));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupContentEx(group));
|
||||
|
||||
const mapContentMsg = await reader.read();
|
||||
const mapContentMsg = (await outRxQ.next()).value;
|
||||
|
||||
expect(mapContentMsg.value).toEqual({
|
||||
expect(mapContentMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: map.core.header,
|
||||
@@ -631,8 +618,9 @@ test("When we connect a new server peer, we try to sync all existing coValues to
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const [inRx, _inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, _inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -641,19 +629,17 @@ test("When we connect a new server peer, we try to sync all existing coValues to
|
||||
role: "server",
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// const _adminSubscribeMessage = await outRxQ.next();
|
||||
const groupSubscribeMessage = (await outRxQ.next()).value;
|
||||
|
||||
// const _adminSubscribeMessage = await reader.read();
|
||||
const groupSubscribeMessage = await reader.read();
|
||||
|
||||
expect(groupSubscribeMessage.value).toEqual({
|
||||
expect(groupSubscribeMessage).toEqual({
|
||||
action: "load",
|
||||
...group.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
const secondMessage = await reader.read();
|
||||
const secondMessage = (await outRxQ.next()).value;
|
||||
|
||||
expect(secondMessage.value).toEqual({
|
||||
expect(secondMessage).toEqual({
|
||||
action: "load",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
@@ -667,8 +653,9 @@ test("When receiving a subscribe with a known state that is ahead of our own, pe
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const [inRx, inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, inTx] = newQueuePair();
|
||||
const [outRx, outTx] = newQueuePair();
|
||||
const outRxQ = outRx[Symbol.asyncIterator]();
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -677,9 +664,7 @@ test("When receiving a subscribe with a known state that is ahead of our own, pe
|
||||
role: "peer",
|
||||
});
|
||||
|
||||
const writer = inTx.getWriter();
|
||||
|
||||
await writer.write({
|
||||
await inTx.push({
|
||||
action: "load",
|
||||
id: map.core.id,
|
||||
header: true,
|
||||
@@ -688,13 +673,11 @@ test("When receiving a subscribe with a known state that is ahead of our own, pe
|
||||
},
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await outRxQ.next()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await outRxQ.next()).value).toMatchObject(groupStateEx(group));
|
||||
const mapTellKnownState = (await outRxQ.next()).value;
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admStateEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupStateEx(group));
|
||||
const mapTellKnownState = await reader.read();
|
||||
|
||||
expect(mapTellKnownState.value).toEqual({
|
||||
expect(mapTellKnownState).toEqual({
|
||||
action: "known",
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
@@ -708,8 +691,9 @@ test.skip("When replaying creation and transactions of a coValue as new content,
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const [inRx1, inTx1] = newStreamPair<SyncMessage>();
|
||||
const [outRx1, outTx1] = newStreamPair<SyncMessage>();
|
||||
const [inRx1, inTx1] = newQueuePair();
|
||||
const [outRx1, outTx1] = newQueuePair();
|
||||
const outRxQ1 = outRx1[Symbol.asyncIterator]();
|
||||
|
||||
node1.syncManager.addPeer({
|
||||
id: "test2",
|
||||
@@ -718,13 +702,11 @@ test.skip("When replaying creation and transactions of a coValue as new content,
|
||||
role: "server",
|
||||
});
|
||||
|
||||
const to1 = inTx1.getWriter();
|
||||
const from1 = outRx1.getReader();
|
||||
|
||||
const node2 = new LocalNode(admin, newRandomSessionID(admin.id), Crypto);
|
||||
|
||||
const [inRx2, inTx2] = newStreamPair<SyncMessage>();
|
||||
const [outRx2, outTx2] = newStreamPair<SyncMessage>();
|
||||
const [inRx2, inTx2] = newQueuePair();
|
||||
const [outRx2, outTx2] = newQueuePair();
|
||||
const outRxQ2 = outRx2[Symbol.asyncIterator]();
|
||||
|
||||
node2.syncManager.addPeer({
|
||||
id: "test1",
|
||||
@@ -733,65 +715,62 @@ test.skip("When replaying creation and transactions of a coValue as new content,
|
||||
role: "client",
|
||||
});
|
||||
|
||||
const to2 = inTx2.getWriter();
|
||||
const from2 = outRx2.getReader();
|
||||
|
||||
const adminSubscribeMessage = await from1.read();
|
||||
expect(adminSubscribeMessage.value).toMatchObject({
|
||||
const adminSubscribeMessage = (await outRxQ1.next()).value;
|
||||
expect(adminSubscribeMessage).toMatchObject({
|
||||
action: "load",
|
||||
id: admin.id,
|
||||
});
|
||||
const groupSubscribeMsg = await from1.read();
|
||||
expect(groupSubscribeMsg.value).toMatchObject({
|
||||
const groupSubscribeMsg = (await outRxQ1.next()).value;
|
||||
expect(groupSubscribeMsg).toMatchObject({
|
||||
action: "load",
|
||||
id: group.core.id,
|
||||
});
|
||||
|
||||
await to2.write(adminSubscribeMessage.value!);
|
||||
await to2.write(groupSubscribeMsg.value!);
|
||||
await inTx2.push(adminSubscribeMessage);
|
||||
await inTx2.push(groupSubscribeMsg);
|
||||
|
||||
// const adminTellKnownStateMsg = await from2.read();
|
||||
// expect(adminTellKnownStateMsg.value).toMatchObject(admStateEx(admin.id));
|
||||
// const adminTellKnownStateMsg = (await outRxQ2.next()).value;
|
||||
// expect(adminTellKnownStateMsg).toMatchObject(admStateEx(admin.id));
|
||||
|
||||
const groupTellKnownStateMsg = await from2.read();
|
||||
expect(groupTellKnownStateMsg.value).toMatchObject(groupStateEx(group));
|
||||
const groupTellKnownStateMsg = (await outRxQ2.next()).value;
|
||||
expect(groupTellKnownStateMsg).toMatchObject(groupStateEx(group));
|
||||
|
||||
expect(
|
||||
node2.syncManager.peers["test1"]!.optimisticKnownStates[group.core.id],
|
||||
).toBeDefined();
|
||||
|
||||
// await to1.write(adminTellKnownStateMsg.value!);
|
||||
await to1.write(groupTellKnownStateMsg.value!);
|
||||
// await inTx1.push(adminTellKnownStateMsg);
|
||||
await inTx1.push(groupTellKnownStateMsg);
|
||||
|
||||
// const adminContentMsg = await from1.read();
|
||||
// expect(adminContentMsg.value).toMatchObject(admContEx(admin.id));
|
||||
// const adminContentMsg = (await outRxQ1.next()).value;
|
||||
// expect(adminContentMsg).toMatchObject(admContEx(admin.id));
|
||||
|
||||
const groupContentMsg = await from1.read();
|
||||
expect(groupContentMsg.value).toMatchObject(groupContentEx(group));
|
||||
const groupContentMsg = (await outRxQ1.next()).value;
|
||||
expect(groupContentMsg).toMatchObject(groupContentEx(group));
|
||||
|
||||
// await to2.write(adminContentMsg.value!);
|
||||
await to2.write(groupContentMsg.value!);
|
||||
// await inTx2.push(adminContentMsg);
|
||||
await inTx2.push(groupContentMsg);
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
const mapSubscriptionMsg = await from1.read();
|
||||
expect(mapSubscriptionMsg.value).toMatchObject({
|
||||
const mapSubscriptionMsg = (await outRxQ1.next()).value;
|
||||
expect(mapSubscriptionMsg).toMatchObject({
|
||||
action: "load",
|
||||
id: map.core.id,
|
||||
});
|
||||
|
||||
const mapNewContentMsg = await from1.read();
|
||||
expect(mapNewContentMsg.value).toEqual({
|
||||
const mapNewContentMsg = (await outRxQ1.next()).value;
|
||||
expect(mapNewContentMsg).toEqual({
|
||||
action: "content",
|
||||
id: map.core.id,
|
||||
header: map.core.header,
|
||||
new: {},
|
||||
} satisfies SyncMessage);
|
||||
|
||||
await to2.write(mapSubscriptionMsg.value!);
|
||||
await inTx2.push(mapSubscriptionMsg);
|
||||
|
||||
const mapTellKnownStateMsg = await from2.read();
|
||||
expect(mapTellKnownStateMsg.value).toEqual({
|
||||
const mapTellKnownStateMsg = (await outRxQ2.next()).value;
|
||||
expect(mapTellKnownStateMsg).toEqual({
|
||||
action: "known",
|
||||
id: map.core.id,
|
||||
header: false,
|
||||
@@ -800,13 +779,13 @@ test.skip("When replaying creation and transactions of a coValue as new content,
|
||||
|
||||
expect(node2.coValues[map.core.id]?.state).toEqual("loading");
|
||||
|
||||
await to2.write(mapNewContentMsg.value!);
|
||||
await inTx2.push(mapNewContentMsg);
|
||||
|
||||
map.set("hello", "world", "trusting");
|
||||
|
||||
const mapEditMsg = await from1.read();
|
||||
const mapEditMsg = (await outRxQ1.next()).value;
|
||||
|
||||
await to2.write(mapEditMsg.value!);
|
||||
await inTx2.push(mapEditMsg);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
@@ -818,6 +797,7 @@ test.skip("When replaying creation and transactions of a coValue as new content,
|
||||
});
|
||||
|
||||
test.skip("When loading a coValue on one node, the server node it is requested from replies with all the necessary depended on coValues to make it work", async () => {
|
||||
/*
|
||||
// TODO: this test is mostly correct but slightly unrealistic; make sure we pass all messages back and forth as expected, and then it should work
|
||||
const [admin, session] = randomAnonymousAccountAndSessionID();
|
||||
|
||||
@@ -842,6 +822,7 @@ test.skip("When loading a coValue on one node, the server node it is requested f
|
||||
node2.expectCoValueLoaded(map.core.id).getCurrentContent(),
|
||||
).get("hello"),
|
||||
).toEqual("world");
|
||||
*/
|
||||
});
|
||||
|
||||
test("Can sync a coValue through a server to another client", async () => {
|
||||
@@ -858,24 +839,32 @@ test("Can sync a coValue through a server to another client", async () => {
|
||||
|
||||
const server = new LocalNode(serverUser, serverSession, Crypto);
|
||||
|
||||
const [serverAsPeer, client1AsPeer] = connectedPeers("server", "client1", {
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
trace: true,
|
||||
});
|
||||
const [serverAsPeerForClient1, client1AsPeer] = await connectedPeers(
|
||||
"serverFor1",
|
||||
"client1",
|
||||
{
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
trace: true,
|
||||
},
|
||||
);
|
||||
|
||||
client1.syncManager.addPeer(serverAsPeer);
|
||||
client1.syncManager.addPeer(serverAsPeerForClient1);
|
||||
server.syncManager.addPeer(client1AsPeer);
|
||||
|
||||
const client2 = new LocalNode(admin, newRandomSessionID(admin.id), Crypto);
|
||||
|
||||
const [serverAsOtherPeer, client2AsPeer] = connectedPeers(
|
||||
"server",
|
||||
const [serverAsPeerForClient2, client2AsPeer] = connectedPeers(
|
||||
"serverFor2",
|
||||
"client2",
|
||||
{ peer1role: "server", peer2role: "client", trace: true },
|
||||
{
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
trace: true,
|
||||
},
|
||||
);
|
||||
|
||||
client2.syncManager.addPeer(serverAsOtherPeer);
|
||||
client2.syncManager.addPeer(serverAsPeerForClient2);
|
||||
server.syncManager.addPeer(client2AsPeer);
|
||||
|
||||
const mapOnClient2 = await client2.loadCoValueCore(map.core.id);
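Both sync-through-a-server tests change in the same way: `connectedPeers(...)` is now awaited, and the server-side peers get distinct ids (`serverFor1`/`serverFor2`) so the server node can hold both clients at once. A hedged sketch of the wiring, with placeholder node types (the option names are the ones used in this diff):

```ts
// Sketch: wiring one server node to two clients with the now-async
// connectedPeers helper. Node types are placeholders for LocalNode.
import { connectedPeers } from "../streamUtils.js"; // path as used inside cojson's tests

async function wireClients(server: any, client1: any, client2: any) {
  const [serverForClient1, client1AsPeer] = await connectedPeers(
    "serverFor1",
    "client1",
    { peer1role: "server", peer2role: "client", trace: true },
  );
  client1.syncManager.addPeer(serverForClient1);
  server.syncManager.addPeer(client1AsPeer);

  const [serverForClient2, client2AsPeer] = await connectedPeers(
    "serverFor2",
    "client2",
    { peer1role: "server", peer2role: "client", trace: true },
  );
  client2.syncManager.addPeer(serverForClient2);
  server.syncManager.addPeer(client2AsPeer);
}
```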
|
||||
@@ -902,21 +891,29 @@ test("Can sync a coValue with private transactions through a server to another c
|
||||
|
||||
const server = new LocalNode(serverUser, serverSession, Crypto);
|
||||
|
||||
const [serverAsPeer, client1AsPeer] = connectedPeers("server", "client1", {
|
||||
trace: true,
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
});
|
||||
const [serverAsPeer, client1AsPeer] = await connectedPeers(
|
||||
"server",
|
||||
"client1",
|
||||
{
|
||||
trace: true,
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
},
|
||||
);
|
||||
|
||||
client1.syncManager.addPeer(serverAsPeer);
|
||||
server.syncManager.addPeer(client1AsPeer);
|
||||
|
||||
const client2 = new LocalNode(admin, newRandomSessionID(admin.id), Crypto);
|
||||
|
||||
const [serverAsOtherPeer, client2AsPeer] = connectedPeers(
|
||||
const [serverAsOtherPeer, client2AsPeer] = await connectedPeers(
|
||||
"server",
|
||||
"client2",
|
||||
{ trace: true, peer1role: "server", peer2role: "client" },
|
||||
{
|
||||
trace: true,
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
},
|
||||
);
|
||||
|
||||
client2.syncManager.addPeer(serverAsOtherPeer);
|
||||
@@ -933,13 +930,14 @@ test("Can sync a coValue with private transactions through a server to another c
|
||||
});
|
||||
|
||||
test.skip("When a peer's incoming/readable stream closes, we remove the peer", async () => {
|
||||
/*
|
||||
const [admin, session] = randomAnonymousAccountAndSessionID();
|
||||
const node = new LocalNode(admin, session, Crypto);
|
||||
|
||||
const group = node.createGroup();
|
||||
|
||||
const [inRx, inTx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx, inTx] = await Effect.runPromise(newStreamPair());
|
||||
const [outRx, outTx] = await Effect.runPromise(newStreamPair());
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -948,12 +946,11 @@ test.skip("When a peer's incoming/readable stream closes, we remove the peer", a
|
||||
role: "server",
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await reader.read()).value).toMatchObject({
|
||||
// expect(yield* Queue.take(outRxQ)).toMatchObject({
|
||||
// action: "load",
|
||||
// id: admin.id,
|
||||
// });
|
||||
expect((await reader.read()).value).toMatchObject({
|
||||
expect(yield * Queue.take(outRxQ)).toMatchObject({
|
||||
action: "load",
|
||||
id: group.core.id,
|
||||
});
|
||||
@@ -967,8 +964,8 @@ test.skip("When a peer's incoming/readable stream closes, we remove the peer", a
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect(yield* Queue.take(outRxQ)).toMatchObject(admContEx(admin.id));
|
||||
expect(yield * Queue.take(outRxQ)).toMatchObject(groupContentEx(group));
|
||||
|
||||
const mapContentMsg = await reader.read();
|
||||
|
||||
@@ -984,16 +981,18 @@ test.skip("When a peer's incoming/readable stream closes, we remove the peer", a
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(node.syncManager.peers["test"]).toBeUndefined();
|
||||
*/
|
||||
});
|
||||
|
||||
test.skip("When a peer's outgoing/writable stream closes, we remove the peer", async () => {
|
||||
/*
|
||||
const [admin, session] = randomAnonymousAccountAndSessionID();
|
||||
const node = new LocalNode(admin, session, Crypto);
|
||||
|
||||
const group = node.createGroup();
|
||||
|
||||
const [inRx] = newStreamPair<SyncMessage>();
|
||||
const [outRx, outTx] = newStreamPair<SyncMessage>();
|
||||
const [inRx] = await Effect.runPromise(newStreamPair());
|
||||
const [outRx, outTx] = await Effect.runPromise(newStreamPair());
|
||||
|
||||
node.syncManager.addPeer({
|
||||
id: "test",
|
||||
@@ -1002,12 +1001,11 @@ test.skip("When a peer's outgoing/writable stream closes, we remove the peer", a
|
||||
role: "server",
|
||||
});
|
||||
|
||||
const reader = outRx.getReader();
|
||||
// expect((await reader.read()).value).toMatchObject({
|
||||
// expect(yield* Queue.take(outRxQ)).toMatchObject({
|
||||
// action: "load",
|
||||
// id: admin.id,
|
||||
// });
|
||||
expect((await reader.read()).value).toMatchObject({
|
||||
expect(yield * Queue.take(outRxQ)).toMatchObject({
|
||||
action: "load",
|
||||
id: group.core.id,
|
||||
});
|
||||
@@ -1021,8 +1019,8 @@ test.skip("When a peer's outgoing/writable stream closes, we remove the peer", a
|
||||
...map.core.knownState(),
|
||||
} satisfies SyncMessage);
|
||||
|
||||
// expect((await reader.read()).value).toMatchObject(admContEx(admin.id));
|
||||
expect((await reader.read()).value).toMatchObject(groupContentEx(group));
|
||||
// expect(yield* Queue.take(outRxQ)).toMatchObject(admContEx(admin.id));
|
||||
expect(yield * Queue.take(outRxQ)).toMatchObject(groupContentEx(group));
|
||||
|
||||
const mapContentMsg = await reader.read();
|
||||
|
||||
@@ -1041,6 +1039,7 @@ test.skip("When a peer's outgoing/writable stream closes, we remove the peer", a
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(node.syncManager.peers["test"]).toBeUndefined();
|
||||
*/
|
||||
});
|
||||
|
||||
test("If we start loading a coValue before connecting to a peer that has it, it will load it once we connect", async () => {
|
||||
@@ -1055,7 +1054,7 @@ test("If we start loading a coValue before connecting to a peer that has it, it
|
||||
|
||||
const node2 = new LocalNode(admin, newRandomSessionID(admin.id), Crypto);
|
||||
|
||||
const [node1asPeer, node2asPeer] = connectedPeers("peer1", "peer2", {
|
||||
const [node1asPeer, node2asPeer] = await connectedPeers("peer1", "peer2", {
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
trace: true,
|
||||
|
||||
@@ -1,5 +1,140 @@
|
||||
# jazz-browser-media-images
|
||||
|
||||
## 0.7.28
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.7.28
|
||||
- jazz-tools@0.7.28
|
||||
|
||||
## 0.7.27
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.7.27
|
||||
|
||||
## 0.7.26
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-browser@0.7.26
|
||||
- jazz-tools@0.7.26
|
||||
|
||||
## 0.7.25
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.25
|
||||
- jazz-browser@0.7.25
|
||||
|
||||
## 0.7.24
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.24
|
||||
- jazz-browser@0.7.24
|
||||
|
||||
## 0.7.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.23
|
||||
- jazz-browser@0.7.23
|
||||
|
||||
## 0.7.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.7.22
|
||||
|
||||
## 0.7.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.21
|
||||
- jazz-browser@0.7.21
|
||||
|
||||
## 0.7.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.20
|
||||
- jazz-browser@0.7.20
|
||||
|
||||
## 0.7.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.19
|
||||
- jazz-browser@0.7.19
|
||||
|
||||
## 0.7.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.7.18
|
||||
- jazz-tools@0.7.18
|
||||
|
||||
## 0.7.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.7.17
|
||||
- jazz-tools@0.7.17
|
||||
|
||||
## 0.7.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.16
|
||||
- jazz-browser@0.7.16
|
||||
|
||||
## 0.7.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.14
|
||||
- jazz-browser@0.7.14
|
||||
|
||||
## 0.7.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.13
|
||||
- jazz-browser@0.7.13
|
||||
|
||||
## 0.7.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.12
|
||||
- jazz-browser@0.7.12
|
||||
|
||||
## 0.7.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.7.11
|
||||
- jazz-tools@0.7.11
|
||||
|
||||
## 0.7.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- jazz-browser@0.7.10
|
||||
- jazz-tools@0.7.10
|
||||
|
||||
## 0.7.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "jazz-browser-media-images",
|
||||
"version": "0.7.9",
|
||||
"version": "0.7.28",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "src/index.ts",
|
||||
|
||||
@@ -1,5 +1,154 @@
|
||||
# jazz-browser
|
||||
|
||||
## 0.7.28
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.28
|
||||
- cojson-storage-indexeddb@0.7.28
|
||||
- cojson-transport-ws@0.7.28
|
||||
- jazz-tools@0.7.28
|
||||
|
||||
## 0.7.27
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson-transport-ws@0.7.27
|
||||
|
||||
## 0.7.26
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Remove Effect from jazz/cojson internals
|
||||
- Updated dependencies
|
||||
- cojson@0.7.26
|
||||
- cojson-storage-indexeddb@0.7.26
|
||||
- cojson-transport-ws@0.7.26
|
||||
- jazz-tools@0.7.26
|
||||
|
||||
## 0.7.25
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.25
|
||||
|
||||
## 0.7.24
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.24
|
||||
|
||||
## 0.7.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.23
|
||||
- jazz-tools@0.7.23
|
||||
- cojson-storage-indexeddb@0.7.23
|
||||
- cojson-transport-ws@0.7.23
|
||||
|
||||
## 0.7.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson-transport-ws@0.7.22
|
||||
|
||||
## 0.7.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.21
|
||||
|
||||
## 0.7.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.20
|
||||
|
||||
## 0.7.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.19
|
||||
|
||||
## 0.7.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.18
|
||||
- cojson-storage-indexeddb@0.7.18
|
||||
- cojson-transport-ws@0.7.18
|
||||
- jazz-tools@0.7.18
|
||||
|
||||
## 0.7.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.17
|
||||
- cojson-storage-indexeddb@0.7.17
|
||||
- cojson-transport-ws@0.7.17
|
||||
- jazz-tools@0.7.17
|
||||
|
||||
## 0.7.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.16
|
||||
|
||||
## 0.7.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.14
|
||||
- jazz-tools@0.7.14
|
||||
- cojson-storage-indexeddb@0.7.14
|
||||
- cojson-transport-ws@0.7.14
|
||||
|
||||
## 0.7.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.13
|
||||
|
||||
## 0.7.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- jazz-tools@0.7.12
|
||||
|
||||
## 0.7.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.11
|
||||
- cojson-storage-indexeddb@0.7.11
|
||||
- jazz-tools@0.7.11
|
||||
|
||||
## 0.7.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies
|
||||
- cojson@0.7.10
|
||||
- cojson-storage-indexeddb@0.7.10
|
||||
- jazz-tools@0.7.10
|
||||
|
||||
## 0.7.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "jazz-browser",
|
||||
"version": "0.7.9",
|
||||
"version": "0.7.28",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "src/index.ts",
|
||||
@@ -9,8 +9,7 @@
|
||||
"@scure/bip39": "^1.3.0",
|
||||
"cojson": "workspace:*",
|
||||
"cojson-storage-indexeddb": "workspace:*",
|
||||
"effect": "^3.1.5",
|
||||
"isomorphic-streams": "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae",
|
||||
"cojson-transport-ws": "workspace:*",
|
||||
"jazz-tools": "workspace:*",
|
||||
"typescript": "^5.1.6"
|
||||
},
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
import {
|
||||
BlockFilename,
|
||||
FSErr,
|
||||
FileSystem,
|
||||
WalFilename,
|
||||
CryptoProvider,
|
||||
} from "cojson";
|
||||
import { Effect } from "effect";
|
||||
import { BlockFilename, FileSystem, WalFilename, CryptoProvider } from "cojson";
|
||||
|
||||
export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
export class OPFSFilesystem
|
||||
implements
|
||||
FileSystem<
|
||||
{ id: number; filename: string },
|
||||
{ id: number; filename: string }
|
||||
>
|
||||
{
|
||||
opfsWorker: Worker;
|
||||
callbacks: Map<number, (event: MessageEvent) => void> = new Map();
|
||||
nextRequestId = 0;
|
||||
@@ -28,18 +27,18 @@ export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
};
|
||||
}
|
||||
|
||||
listFiles(): Effect.Effect<string[], FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
listFiles(): Promise<string[]> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("listFiles" + requestId);
|
||||
performance.mark("listFiles" + requestId + "_listFiles");
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("listFilesEnd" + requestId);
|
||||
performance.mark("listFilesEnd" + requestId + "_listFiles");
|
||||
performance.measure(
|
||||
"listFiles" + requestId,
|
||||
"listFiles" + requestId,
|
||||
"listFilesEnd" + requestId,
|
||||
"listFiles" + requestId + "_listFiles",
|
||||
"listFiles" + requestId + "_listFiles",
|
||||
"listFilesEnd" + requestId + "_listFiles",
|
||||
);
|
||||
cb(Effect.succeed(event.data.fileNames));
|
||||
resolve(event.data.fileNames);
|
||||
});
|
||||
this.opfsWorker.postMessage({ type: "listFiles", requestId });
|
||||
});
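Every method in `OPFSFilesystem` follows the same request/response pattern against the worker: allocate a request id, register a callback, post a message, and settle when the worker answers. The Effect-to-Promise rewrite keeps that pattern method by method; a hedged sketch of the shared helper it could be factored into (this helper does not exist in the diff, it only illustrates the pattern):

```ts
// Sketch only: the worker round-trip that each OPFSFilesystem method repeats,
// pulled out into one generic Promise-returning helper.
class WorkerRequests {
  private callbacks = new Map<number, (event: MessageEvent) => void>();
  private nextRequestId = 0;

  constructor(private worker: Worker) {
    worker.onmessage = (event) => {
      const cb = this.callbacks.get(event.data.requestId);
      cb?.(event);
      this.callbacks.delete(event.data.requestId);
    };
  }

  request<T>(message: object, pick: (event: MessageEvent) => T): Promise<T> {
    return new Promise((resolve) => {
      const requestId = this.nextRequestId++;
      this.callbacks.set(requestId, (event) => resolve(pick(event)));
      this.worker.postMessage({ ...message, requestId });
    });
  }
}

// e.g. listFiles() becomes roughly:
//   requests.request({ type: "listFiles" }, (event) => event.data.fileNames);
```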
|
||||
@@ -47,22 +46,20 @@ export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
|
||||
openToRead(
|
||||
filename: string,
|
||||
): Effect.Effect<{ handle: number; size: number }, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
): Promise<{ handle: { id: number; filename: string }; size: number }> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("openToRead" + requestId);
|
||||
performance.mark("openToRead" + "_" + filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
cb(
|
||||
Effect.succeed({
|
||||
handle: event.data.handle,
|
||||
size: event.data.size,
|
||||
}),
|
||||
);
|
||||
performance.mark("openToReadEnd" + requestId);
|
||||
resolve({
|
||||
handle: { id: event.data.handle, filename },
|
||||
size: event.data.size,
|
||||
});
|
||||
performance.mark("openToReadEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"openToRead" + requestId,
|
||||
"openToRead" + requestId,
|
||||
"openToReadEnd" + requestId,
|
||||
"openToRead" + "_" + filename,
|
||||
"openToRead" + "_" + filename,
|
||||
"openToReadEnd" + "_" + filename,
|
||||
);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
@@ -73,18 +70,18 @@ export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
});
|
||||
}
|
||||
|
||||
createFile(filename: string): Effect.Effect<number, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
createFile(filename: string): Promise<{ id: number; filename: string }> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("createFile" + requestId);
|
||||
performance.mark("createFile" + "_" + filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("createFileEnd" + requestId);
|
||||
performance.mark("createFileEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"createFile" + requestId,
|
||||
"createFile" + requestId,
|
||||
"createFileEnd" + requestId,
|
||||
"createFile" + "_" + filename,
|
||||
"createFile" + "_" + filename,
|
||||
"createFileEnd" + "_" + filename,
|
||||
);
|
||||
cb(Effect.succeed(event.data.handle));
|
||||
resolve({ id: event.data.handle, filename });
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "createFile",
|
||||
@@ -94,20 +91,18 @@ export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
});
|
||||
}
|
||||
|
||||
openToWrite(
|
||||
filename: string,
|
||||
): Effect.Effect<FileSystemFileHandle, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
openToWrite(filename: string): Promise<{ id: number; filename: string }> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("openToWrite" + requestId);
|
||||
performance.mark("openToWrite" + "_" + filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("openToWriteEnd" + requestId);
|
||||
performance.mark("openToWriteEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"openToWrite" + requestId,
|
||||
"openToWrite" + requestId,
|
||||
"openToWriteEnd" + requestId,
|
||||
"openToWrite" + "_" + filename,
|
||||
"openToWrite" + "_" + filename,
|
||||
"openToWriteEnd" + "_" + filename,
|
||||
);
|
||||
cb(Effect.succeed(event.data.handle));
|
||||
resolve({ id: event.data.handle, filename });
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "openToWrite",
|
||||
@@ -118,24 +113,24 @@ export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
}
|
||||
|
||||
append(
|
||||
handle: number,
|
||||
handle: { id: number; filename: string },
|
||||
data: Uint8Array,
|
||||
): Effect.Effect<void, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("append" + requestId);
|
||||
performance.mark("append" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, (_) => {
|
||||
performance.mark("appendEnd" + requestId);
|
||||
performance.mark("appendEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"append" + requestId,
|
||||
"append" + requestId,
|
||||
"appendEnd" + requestId,
|
||||
"append" + "_" + handle.filename,
|
||||
"append" + "_" + handle.filename,
|
||||
"appendEnd" + "_" + handle.filename,
|
||||
);
|
||||
cb(Effect.succeed(undefined));
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "append",
|
||||
handle,
|
||||
handle: handle.id,
|
||||
data,
|
||||
requestId,
|
||||
});
|
||||
@@ -143,25 +138,25 @@ export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
}
|
||||
|
||||
read(
|
||||
handle: number,
|
||||
handle: { id: number; filename: string },
|
||||
offset: number,
|
||||
length: number,
|
||||
): Effect.Effect<Uint8Array, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
): Promise<Uint8Array> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("read" + requestId);
|
||||
performance.mark("read" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, (event) => {
|
||||
performance.mark("readEnd" + requestId);
|
||||
performance.mark("readEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"read" + requestId,
|
||||
"read" + requestId,
|
||||
"readEnd" + requestId,
|
||||
"read" + "_" + handle.filename,
|
||||
"read" + "_" + handle.filename,
|
||||
"readEnd" + "_" + handle.filename,
|
||||
);
|
||||
cb(Effect.succeed(event.data.data));
|
||||
resolve(event.data.data);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "read",
|
||||
handle,
|
||||
handle: handle.id,
|
||||
offset,
|
||||
length,
|
||||
requestId,
|
||||
@@ -169,66 +164,64 @@ export class OPFSFilesystem implements FileSystem<number, number> {
|
||||
});
|
||||
}
|
||||
|
||||
close(handle: number): Effect.Effect<void, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
close(handle: { id: number; filename: string }): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("close" + requestId);
|
||||
performance.mark("close" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, (_) => {
|
||||
performance.mark("closeEnd" + requestId);
|
||||
performance.mark("closeEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"close" + requestId,
|
||||
"close" + requestId,
|
||||
"closeEnd" + requestId,
|
||||
"close" + "_" + handle.filename,
|
||||
"close" + "_" + handle.filename,
|
||||
"closeEnd" + "_" + handle.filename,
|
||||
);
|
||||
cb(Effect.succeed(undefined));
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "close",
|
||||
handle,
|
||||
handle: handle.id,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
closeAndRename(
|
||||
handle: number,
|
||||
handle: { id: number; filename: string },
|
||||
filename: BlockFilename,
|
||||
): Effect.Effect<void, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("closeAndRename" + requestId);
|
||||
performance.mark("closeAndRename" + "_" + handle.filename);
|
||||
this.callbacks.set(requestId, () => {
|
||||
performance.mark("closeAndRenameEnd" + requestId);
|
||||
performance.mark("closeAndRenameEnd" + "_" + handle.filename);
|
||||
performance.measure(
|
||||
"closeAndRename" + requestId,
|
||||
"closeAndRename" + requestId,
|
||||
"closeAndRenameEnd" + requestId,
|
||||
"closeAndRename" + "_" + handle.filename,
|
||||
"closeAndRename" + "_" + handle.filename,
|
||||
"closeAndRenameEnd" + "_" + handle.filename,
|
||||
);
|
||||
cb(Effect.succeed(undefined));
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "closeAndRename",
|
||||
handle,
|
||||
handle: handle.id,
|
||||
filename,
|
||||
requestId,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
removeFile(
|
||||
filename: BlockFilename | WalFilename,
|
||||
): Effect.Effect<void, FSErr, never> {
|
||||
return Effect.async((cb) => {
|
||||
removeFile(filename: BlockFilename | WalFilename): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const requestId = this.nextRequestId++;
|
||||
performance.mark("removeFile" + requestId);
|
||||
performance.mark("removeFile" + "_" + filename);
|
||||
this.callbacks.set(requestId, () => {
|
||||
performance.mark("removeFileEnd" + requestId);
|
||||
performance.mark("removeFileEnd" + "_" + filename);
|
||||
performance.measure(
|
||||
"removeFile" + requestId,
|
||||
"removeFile" + requestId,
|
||||
"removeFileEnd" + requestId,
|
||||
"removeFile" + "_" + filename,
|
||||
"removeFile" + "_" + filename,
|
||||
"removeFileEnd" + "_" + filename,
|
||||
);
|
||||
cb(Effect.succeed(undefined));
|
||||
resolve(undefined);
|
||||
});
|
||||
this.opfsWorker.postMessage({
|
||||
type: "removeFile",
|
||||
@@ -309,7 +302,7 @@ const opfsWorkerJSSrc = `
|
||||
postMessage({requestId: event.data.requestId, data: buffer, result: "done"});
|
||||
} else if (event.data.type === "close") {
|
||||
const handle = handlesByRequest.get(event.data.handle);
|
||||
console.log("Closing handle", filenamesForHandles.get(handle), event.data.handle, handle);
|
||||
// console.log("Closing handle", filenamesForHandles.get(handle), event.data.handle, handle);
|
||||
handle.flush();
|
||||
handle.close();
|
||||
handlesByRequest.delete(handle);
|
||||
|
||||
@@ -1,11 +1,8 @@
import { ReadableStream, WritableStream } from "isomorphic-streams";
import {
CoValue,
ID,
Peer,
AgentID,
SessionID,
SyncMessage,
cojsonInternals,
InviteSecret,
Account,
@@ -17,6 +14,7 @@ import { AccountID, LSMStorage } from "cojson";
import { AuthProvider } from "./auth/auth.js";
import { OPFSFilesystem } from "./OPFSFilesystem.js";
import { IDBStorage } from "cojson-storage-indexeddb";
import { createWebSocketPeer } from "cojson-transport-ws";
export * from "./auth/auth.js";

/** @category Context Creation */
@@ -29,7 +27,7 @@ export type BrowserContext<Acc extends Account> = {
/** @category Context Creation */
export async function createJazzBrowserContext<Acc extends Account>({
auth,
peer,
peer: peerAddr,
reconnectionTimeout: initialReconnectionTimeout = 500,
storage = "indexedDB",
crypto: customCrypto,
@@ -37,13 +35,17 @@ export async function createJazzBrowserContext<Acc extends Account>({
auth: AuthProvider<Acc>;
peer: `wss://${string}` | `ws://${string}`;
reconnectionTimeout?: number;
storage?: "indexedDB" | "experimentalOPFSdoNotUseOrYouWillBeFired";
storage?: "indexedDB" | "singleTabOPFS";
crypto?: CryptoProvider;
}): Promise<BrowserContext<Acc>> {
const crypto = customCrypto || (await WasmCrypto.create());
let sessionDone: () => void;

const firstWsPeer = createWebSocketPeer(peer);
const firstWsPeer = createWebSocketPeer({
websocket: new WebSocket(peerAddr),
id: peerAddr + "@" + new Date().toISOString(),
role: "server",
});
let shouldTryToReconnect = true;

let currentReconnectionTimeout = initialReconnectionTimeout;
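With the storage option renamed to `singleTabOPFS`, a call site would look roughly like the sketch below. This is hedged and based only on the signature shown above: `myAuthProvider` stands in for whatever `AuthProvider<Acc>` the app already uses, and the sync server URL is a placeholder.

```ts
// Sketch of a call site for the signature above (not taken from the repo).
const context = await createJazzBrowserContext({
    auth: myAuthProvider,               // placeholder for an existing AuthProvider<Acc>
    peer: "wss://sync.example.com",     // placeholder sync server address
    storage: "singleTabOPFS",           // was "experimentalOPFSdoNotUseOrYouWillBeFired"
});
```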
@@ -77,7 +79,7 @@ export async function createJazzBrowserContext<Acc extends Account>({
while (shouldTryToReconnect) {
if (
Object.keys(me._raw.core.node.syncManager.peers).some(
(peerId) => peerId.includes(peer),
(peerId) => peerId.includes(peerAddr),
)
) {
// TODO: this might drain battery, use listeners instead
@@ -107,7 +109,11 @@ export async function createJazzBrowserContext<Acc extends Account>({
});

me._raw.core.node.syncManager.addPeer(
createWebSocketPeer(peer),
createWebSocketPeer({
websocket: new WebSocket(peerAddr),
id: peerAddr + "@" + new Date().toISOString(),
role: "server",
})
);
}
}
@@ -121,13 +127,7 @@ export async function createJazzBrowserContext<Acc extends Account>({
shouldTryToReconnect = false;
window.removeEventListener("online", onOnline);
console.log("Cleaning up node");
for (const peer of Object.values(
me._raw.core.node.syncManager.peers,
)) {
peer.outgoing
.close()
.catch((e) => console.error("Error while closing peer", e));
}
me._raw.core.node.gracefulShutdown();
sessionDone?.();
},
};
@@ -207,140 +207,6 @@ export function getSessionHandleFor(
};
}

function websocketReadableStream<T>(ws: WebSocket) {
ws.binaryType = "arraybuffer";

return new ReadableStream<T>({
start(controller) {
let pingTimeout: ReturnType<typeof setTimeout> | undefined;

ws.onmessage = (event) => {
const msg = JSON.parse(event.data);

if (pingTimeout) {
clearTimeout(pingTimeout);
}

pingTimeout = setTimeout(() => {
console.debug("Ping timeout");
try {
controller.close();
ws.close();
} catch (e) {
console.error(
"Error while trying to close ws on ping timeout",
e,
);
}
}, 2500);

if (msg.type === "ping") {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).jazzPings = (window as any).jazzPings || [];
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).jazzPings.push({
received: Date.now(),
sent: msg.time,
dc: msg.dc,
});
return;
}
controller.enqueue(msg);
};
const closeListener = () => {
controller.close();
clearTimeout(pingTimeout);
};
ws.addEventListener("close", closeListener);
ws.addEventListener("error", () => {
controller.error(new Error("The WebSocket errored!"));
ws.removeEventListener("close", closeListener);
});
},

cancel() {
ws.close();
},
});
}

export function createWebSocketPeer(syncAddress: string): Peer {
const ws = new WebSocket(syncAddress);

const incoming = websocketReadableStream<SyncMessage>(ws);
const outgoing = websocketWritableStream<SyncMessage>(ws);

return {
id: syncAddress + "@" + new Date().toISOString(),
incoming,
outgoing,
role: "server",
};
}

function websocketWritableStream<T>(ws: WebSocket) {
const initialQueue = [] as T[];
let isOpen = false;

return new WritableStream<T>({
start(controller) {
ws.addEventListener("error", (event) => {
controller.error(
new Error("The WebSocket errored!" + JSON.stringify(event)),
);
});
ws.addEventListener("close", () => {
controller.error(
new Error("The server closed the connection unexpectedly!"),
);
});
ws.addEventListener("open", () => {
for (const item of initialQueue) {
ws.send(JSON.stringify(item));
}
isOpen = true;
});
},

async write(chunk) {
if (isOpen) {
ws.send(JSON.stringify(chunk));
// Return immediately, since the web socket gives us no easy way to tell
// when the write completes.
} else {
initialQueue.push(chunk);
}
},

close() {
return closeWS(1000);
},

abort(reason) {
return closeWS(4000, reason && reason.message);
},
});

function closeWS(code: number, reasonString?: string) {
return new Promise<void>((resolve, reject) => {
ws.addEventListener(
"close",
(e) => {
if (e.wasClean) {
resolve();
} else {
reject(
new Error("The connection was not closed cleanly"),
);
}
},
{ once: true },
);
ws.close(code, reasonString);
});
}
}

/** @category Invite Links */
export function createInviteLink<C extends CoValue>(
value: C,
@@ -1,5 +1,148 @@
# jazz-autosub

## 0.7.28

### Patch Changes

- Updated dependencies
- cojson@0.7.28
- cojson-transport-ws@0.7.28
- jazz-tools@0.7.28

## 0.7.27

### Patch Changes

- Updated dependencies
- cojson-transport-ws@0.7.27

## 0.7.26

### Patch Changes

- Remove Effect from jazz/cojson internals
- Updated dependencies
- cojson@0.7.26
- cojson-transport-ws@0.7.26
- jazz-tools@0.7.26

## 0.7.25

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.25

## 0.7.24

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.24

## 0.7.23

### Patch Changes

- Updated dependencies
- cojson@0.7.23
- jazz-tools@0.7.23
- cojson-transport-ws@0.7.23

## 0.7.22

### Patch Changes

- Updated dependencies
- cojson-transport-ws@0.7.22

## 0.7.21

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.21

## 0.7.20

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.20

## 0.7.19

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.19

## 0.7.18

### Patch Changes

- Updated dependencies
- cojson@0.7.18
- cojson-transport-ws@0.7.18
- jazz-tools@0.7.18

## 0.7.17

### Patch Changes

- Updated dependencies
- cojson@0.7.17
- cojson-transport-ws@0.7.17
- jazz-tools@0.7.17

## 0.7.16

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.16

## 0.7.14

### Patch Changes

- Updated dependencies
- cojson@0.7.14
- jazz-tools@0.7.14
- cojson-transport-ws@0.7.14

## 0.7.13

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.13

## 0.7.12

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.12

## 0.7.11

### Patch Changes

- Updated dependencies
- cojson@0.7.11
- cojson-transport-nodejs-ws@0.7.11
- jazz-tools@0.7.11

## 0.7.10

### Patch Changes

- Updated dependencies
- cojson@0.7.10
- cojson-transport-nodejs-ws@0.7.10
- jazz-tools@0.7.10

## 0.7.9

### Patch Changes

@@ -5,10 +5,10 @@
"types": "src/index.ts",
"type": "module",
"license": "MIT",
"version": "0.7.9",
"version": "0.7.28",
"dependencies": {
"cojson": "workspace:*",
"cojson-transport-nodejs-ws": "workspace:*",
"cojson-transport-ws": "workspace:*",
"jazz-tools": "workspace:*",
"ws": "^8.14.2"
},

@@ -1,11 +1,7 @@
import {
websocketReadableStream,
websocketWritableStream,
} from "cojson-transport-nodejs-ws";
import { WebSocket } from "ws";

import { AgentSecret, Peer, SessionID, WasmCrypto } from "cojson";
import { createWebSocketPeer } from "cojson-transport-ws";
import { Account, CoValueClass, ID } from "jazz-tools";
import { WebSocket } from "ws";

/** @category Context Creation */
export async function startWorker<Acc extends Account>({
@@ -21,14 +17,11 @@ export async function startWorker<Acc extends Account>({
syncServer?: string;
accountSchema?: CoValueClass<Acc> & typeof Account;
}): Promise<{ worker: Acc }> {
const ws = new WebSocket(peer);

const wsPeer: Peer = {
const wsPeer: Peer = createWebSocketPeer({
id: "upstream",
websocket: new WebSocket(peer),
role: "server",
incoming: websocketReadableStream(ws),
outgoing: websocketWritableStream(ws),
};
});

if (!accountID) {
throw new Error("No accountID provided");
@@ -52,17 +45,15 @@ export async function startWorker<Acc extends Account>({
crypto: await WasmCrypto.create(),
});

setInterval(() => {
setInterval(async () => {
if (!worker._raw.core.node.syncManager.peers["upstream"]) {
console.log(new Date(), "Reconnecting to upstream " + peer);
const ws = new WebSocket(peer);

const wsPeer: Peer = {
const wsPeer: Peer = createWebSocketPeer({
id: "upstream",
websocket: new WebSocket(peer),
role: "server",
incoming: websocketReadableStream(ws),
outgoing: websocketWritableStream(ws),
};
});

worker._raw.core.node.syncManager.addPeer(wsPeer);
}
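In `startWorker`, the hand-built `Peer` (wired up with `websocketReadableStream`/`websocketWritableStream`) is replaced by the `createWebSocketPeer` helper from cojson-transport-ws, passing the ws package's WebSocket. A minimal sketch of that call, extracted from the hunks above into a standalone helper (the helper itself is illustrative, not part of the package):

```ts
import { Peer } from "cojson";
import { createWebSocketPeer } from "cojson-transport-ws";
import { WebSocket } from "ws";

// `peer` is the upstream sync server URL that startWorker receives.
function makeUpstreamPeer(peer: string): Peer {
    return createWebSocketPeer({
        id: "upstream",
        websocket: new WebSocket(peer),
        role: "server",
    });
}
```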
@@ -1,5 +1,161 @@
# jazz-react

## 0.7.28

### Patch Changes

- Updated dependencies
- cojson@0.7.28
- jazz-browser@0.7.28
- jazz-tools@0.7.28

## 0.7.27

### Patch Changes

- jazz-browser@0.7.27

## 0.7.26

### Patch Changes

- Remove Effect from jazz/cojson internals
- Updated dependencies
- cojson@0.7.26
- jazz-browser@0.7.26
- jazz-tools@0.7.26

## 0.7.25

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.25
- jazz-browser@0.7.25

## 0.7.24

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.24
- jazz-browser@0.7.24

## 0.7.23

### Patch Changes

- Mostly complete OPFS implementation (single-tab only)
- Updated dependencies
- cojson@0.7.23
- jazz-tools@0.7.23
- jazz-browser@0.7.23

## 0.7.22

### Patch Changes

- jazz-browser@0.7.22

## 0.7.21

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.21
- jazz-browser@0.7.21

## 0.7.20

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.20
- jazz-browser@0.7.20

## 0.7.19

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.19
- jazz-browser@0.7.19

## 0.7.18

### Patch Changes

- Updated dependencies
- cojson@0.7.18
- jazz-browser@0.7.18
- jazz-tools@0.7.18

## 0.7.17

### Patch Changes

- Updated dependencies
- cojson@0.7.17
- jazz-browser@0.7.17
- jazz-tools@0.7.17

## 0.7.16

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.16
- jazz-browser@0.7.16

## 0.7.15

### Patch Changes

- Provide current res in ProgressiveImg

## 0.7.14

### Patch Changes

- Updated dependencies
- cojson@0.7.14
- jazz-tools@0.7.14
- jazz-browser@0.7.14

## 0.7.13

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.13
- jazz-browser@0.7.13

## 0.7.12

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.12
- jazz-browser@0.7.12

## 0.7.11

### Patch Changes

- Updated dependencies
- cojson@0.7.11
- jazz-browser@0.7.11
- jazz-tools@0.7.11

## 0.7.10

### Patch Changes

- Updated dependencies
- cojson@0.7.10
- jazz-browser@0.7.10
- jazz-tools@0.7.10

## 0.7.9

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "jazz-react",
"version": "0.7.9",
"version": "0.7.28",
"type": "module",
"main": "dist/index.js",
"types": "src/index.ts",

@@ -104,7 +104,10 @@ const DemoAuthBasicUI = ({
signUp: (username: string) => void;
}) => {
const [username, setUsername] = useState<string>("");
const darkMode = window.matchMedia("(prefers-color-scheme: dark)").matches;
const darkMode =
typeof window !== "undefined"
? window.matchMedia("(prefers-color-scheme: dark)").matches
: false;

return (
<div
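The DemoAuthBasicUI change guards the `matchMedia` call so the component no longer throws where `window` is undefined (e.g. during server-side rendering). The same guard as a standalone helper, shown here only as a sketch of the pattern:

```ts
// Returns the user's dark-mode preference, falling back to `false` in
// environments without a `window` (e.g. SSR).
function prefersDarkMode(): boolean {
    return typeof window !== "undefined"
        ? window.matchMedia("(prefers-color-scheme: dark)").matches
        : false;
}
```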
@@ -23,7 +23,7 @@ export function createJazzReactContext<Acc extends Account>({
}: {
auth: ReactAuthHook<Acc>;
peer: `wss://${string}` | `ws://${string}`;
storage?: "indexedDB" | "experimentalOPFSdoNotUseOrYouWillBeFired";
storage?: "indexedDB" | "singleTabOPFS";
}): JazzReactContext<Acc> {
const JazzContext = React.createContext<
| {

@@ -9,7 +9,10 @@ export function useProgressiveImg({
image: ImageDefinition | null | undefined;
maxWidth?: number;
}) {
const [src, setSrc] = useState<string | undefined>(undefined);
const [current, setCurrent] = useState<
| { src?: string; res?: `${number}x${number}` | "placeholder" }
| undefined
>(undefined);

useEffect(() => {
let lastHighestRes: string | undefined;
@@ -22,21 +25,28 @@ export function useProgressiveImg({
const blob = highestRes.stream.toBlob();
if (blob) {
const blobURI = URL.createObjectURL(blob);
setSrc(blobURI);
setCurrent({ src: blobURI, res: highestRes.res });
return () => {
setTimeout(() => URL.revokeObjectURL(blobURI), 200);
};
}
}
} else {
setSrc(update?.placeholderDataURL);
setCurrent({
src: update?.placeholderDataURL,
res: "placeholder",
});
}
});

return unsub;
}, [image?.id, maxWidth]);

return { src, originalSize: image?.originalSize };
return {
src: current?.src,
res: current?.res,
originalSize: image?.originalSize,
};
}

/** @category Media */
@@ -47,6 +57,7 @@ export function ProgressiveImg({
}: {
children: (result: {
src: string | undefined;
res: `${number}x${number}` | "placeholder" | undefined;
originalSize: readonly [number, number] | undefined;
}) => React.ReactNode;
image: ImageDefinition | null | undefined;
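`useProgressiveImg` now reports the resolution it is currently serving, and `ProgressiveImg` forwards it to its children. A hedged usage sketch based only on the children signature above; `someImageDefinition` is a placeholder `ImageDefinition`, and the blur styling is purely illustrative:

```tsx
// Illustrative only: render the best available resolution and keep the
// placeholder visually distinct while the full image is still loading.
<ProgressiveImg image={someImageDefinition}>
    {({ src, res, originalSize }) => (
        <img
            src={src}
            width={originalSize?.[0]}
            height={originalSize?.[1]}
            style={{ filter: res === "placeholder" ? "blur(8px)" : undefined }}
        />
    )}
</ProgressiveImg>
```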
@@ -1,5 +1,147 @@
# jazz-autosub

## 0.7.28

### Patch Changes

- Updated dependencies
- cojson@0.7.28
- cojson-transport-ws@0.7.28
- jazz-tools@0.7.28

## 0.7.27

### Patch Changes

- Updated dependencies
- cojson-transport-ws@0.7.27

## 0.7.26

### Patch Changes

- Updated dependencies
- cojson@0.7.26
- cojson-transport-ws@0.7.26
- jazz-tools@0.7.26

## 0.7.25

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.25

## 0.7.24

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.24

## 0.7.23

### Patch Changes

- Updated dependencies
- cojson@0.7.23
- jazz-tools@0.7.23
- cojson-transport-ws@0.7.23

## 0.7.22

### Patch Changes

- Updated dependencies
- cojson-transport-ws@0.7.22

## 0.7.21

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.21

## 0.7.20

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.20

## 0.7.19

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.19

## 0.7.18

### Patch Changes

- Updated dependencies
- cojson@0.7.18
- cojson-transport-ws@0.7.18
- jazz-tools@0.7.18

## 0.7.17

### Patch Changes

- Updated dependencies
- cojson@0.7.17
- cojson-transport-ws@0.7.17
- jazz-tools@0.7.17

## 0.7.16

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.16

## 0.7.14

### Patch Changes

- Updated dependencies
- cojson@0.7.14
- jazz-tools@0.7.14
- cojson-transport-ws@0.7.14

## 0.7.13

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.13

## 0.7.12

### Patch Changes

- Updated dependencies
- jazz-tools@0.7.12

## 0.7.11

### Patch Changes

- Updated dependencies
- cojson@0.7.11
- cojson-transport-nodejs-ws@0.7.11
- jazz-tools@0.7.11

## 0.7.10

### Patch Changes

- Updated dependencies
- cojson@0.7.10
- cojson-transport-nodejs-ws@0.7.10
- jazz-tools@0.7.10

## 0.7.9

### Patch Changes
Some files were not shown because too many files have changed in this diff.