Compare commits

...

33 Commits

Author SHA1 Message Date
Anselm
ee7e3ee5a7 Publish
- jazz-example-pets@0.0.7
 - jazz-example-todo@0.0.32
 - cojson@0.2.2
 - cojson-simple-sync@0.2.4
 - cojson-storage-sqlite@0.2.4
 - jazz-browser@0.2.3
 - jazz-browser-auth-local@0.2.3
 - jazz-browser-media-images@0.2.3
 - jazz-react@0.2.3
 - jazz-react-auth-local@0.2.3
 - jazz-react-media-images@0.2.3
 - jazz-storage-indexeddb@0.2.3
2023-09-12 15:26:43 +01:00
Anselm
ceeed88fa5 Less verbose error output 2023-09-12 15:26:22 +01:00
Anselm
79353a1d97 Publish
- cojson-simple-sync@0.2.3
 - cojson-storage-sqlite@0.2.3
2023-09-12 15:22:01 +01:00
Anselm
7fdc42c62f Fix migration 2023-09-12 15:21:45 +01:00
Anselm
3a2e854a88 Publish
- cojson-simple-sync@0.2.2
 - cojson-storage-sqlite@0.2.2
2023-09-12 15:19:12 +01:00
Anselm
661a2d023a Fixes #90 for SQLite 2023-09-12 15:18:53 +01:00
Anselm
6ef5b6b2ab Publish
- jazz-example-pets@0.0.6
 - jazz-example-todo@0.0.31
 - jazz-browser@0.2.2
 - jazz-browser-auth-local@0.2.2
 - jazz-browser-media-images@0.2.2
 - jazz-react@0.2.2
 - jazz-react-auth-local@0.2.2
 - jazz-react-media-images@0.2.2
 - jazz-storage-indexeddb@0.2.2
2023-09-12 14:56:31 +01:00
Anselm
1384ebed84 Fix migration 2023-09-12 14:55:57 +01:00
Anselm
17e53f9998 Publish
- jazz-example-pets@0.0.5
 - jazz-example-todo@0.0.30
 - cojson@0.2.1
 - cojson-simple-sync@0.2.1
 - cojson-storage-sqlite@0.2.1
 - jazz-browser@0.2.1
 - jazz-browser-auth-local@0.2.1
 - jazz-browser-media-images@0.2.1
 - jazz-react@0.2.1
 - jazz-react-auth-local@0.2.1
 - jazz-react-media-images@0.2.1
 - jazz-storage-indexeddb@0.2.1
2023-09-12 14:47:50 +01:00
Anselm
cfb1f39efe update docs 2023-09-12 14:47:17 +01:00
Anselm
2234276dcf Implement extra signatures & fix #90 for IndexedDB 2023-09-12 14:42:47 +01:00
Anselm
bb0a6a0600 yield microtask between incoming messages 2023-09-12 11:22:44 +01:00
Anselm
0a6eb0c10a Lots of fixes around streaming 2023-09-12 11:13:19 +01:00
Anselm
88b67d89e0 First implementation of streaming transactions, also fixes #80 2023-09-11 19:29:52 +01:00
Anselm Eickhoff
1a65d826b2 Update pets README.md 2023-09-11 17:24:01 +01:00
Anselm Eickhoff
6c65ec2b46 Merge pull request #81 from gardencmp/publish-pet-example
Publish pet example
2023-09-11 17:21:16 +01:00
Anselm
5b578a832d Fix job name and missing amtrix 2023-09-11 17:13:16 +01:00
Anselm
042afc52d7 Fix interpolation 2023-09-11 17:10:12 +01:00
Anselm
1b83493964 Use matrix and add pets example 2023-09-11 17:09:14 +01:00
Anselm
3b50da1a74 Remove redundant yarn build step 2023-09-11 17:04:42 +01:00
Anselm
8e0fc74d9f Switch to buildx 2023-09-11 17:03:18 +01:00
Anselm Eickhoff
e28326f32c Merge pull request #79 from gardencmp/anselm-gar-155
Make payload of trusting transactions JSON string instead of immediately-parsed JSON
2023-09-11 16:32:30 +01:00
Anselm
d7e8b0b9da Publish
- jazz-example-pets@0.0.4
 - jazz-example-todo@0.0.29
 - cojson@0.2.0
 - cojson-simple-sync@0.2.0
 - cojson-storage-sqlite@0.2.0
 - jazz-browser@0.2.0
 - jazz-browser-auth-local@0.2.0
 - jazz-browser-media-images@0.2.0
 - jazz-react@0.2.0
 - jazz-react-auth-local@0.2.0
 - jazz-react-media-images@0.2.0
 - jazz-storage-indexeddb@0.2.0
2023-09-11 16:19:44 +01:00
Anselm
c46a1f6b0a Update docs 2023-09-11 16:18:39 +01:00
Anselm
7947918278 lint pet example 2023-09-11 16:11:26 +01:00
Anselm
50c36e7255 Make tx.changes stringified 2023-09-11 16:11:17 +01:00
Anselm
c39a7ed1b7 Implement jazz-browser-media-images 2023-09-11 11:44:55 +01:00
Anselm
83762dbb0f Fix getLastItemsPerAccount 2023-09-10 15:36:41 +01:00
Anselm
7c82e12508 Fix filenames in pets example 2023-09-10 15:20:12 +01:00
Anselm
6db149be36 Complete most of the pets example 2023-09-10 15:15:23 +01:00
Anselm
909a101f99 Publish
- jazz-example-pets@0.0.3
 - jazz-example-todo@0.0.28
 - cojson@0.1.12
 - cojson-simple-sync@0.1.13
 - cojson-storage-sqlite@0.1.10
 - jazz-browser@0.1.12
 - jazz-browser-auth-local@0.1.12
 - jazz-react@0.1.14
 - jazz-react-auth-local@0.1.14
 - jazz-storage-indexeddb@0.1.12
2023-09-08 17:29:07 +01:00
Anselm
df0b6fe138 Update docs 2023-09-08 17:28:53 +01:00
Anselm
0543756016 More optimizations and first support for streaming hashing 2023-09-08 17:28:33 +01:00
70 changed files with 3151 additions and 606 deletions

View File

@@ -7,8 +7,11 @@ on:
branches: [ "main" ]
jobs:
build-and-deploy:
build:
runs-on: ubuntu-latest
strategy:
matrix:
example: ["todo", "pets"]
steps:
- uses: actions/checkout@v3
@@ -17,40 +20,50 @@ jobs:
- uses: actions/setup-node@v3
with:
node-version: 18
node-version: 16
cache: 'yarn'
cache-dependency-path: yarn.lock
- name: Nuke Workspace
run: |
rm package.json yarn.lock;
- name: Yarn Build
run: |
yarn install --frozen-lockfile;
yarn build;
working-directory: ./examples/todo
- uses: satackey/action-docker-layer-caching@v0.0.11
continue-on-error: true
with:
key: docker-layer-caching-${{ github.workflow }}-{hash}
restore-keys: |
docker-layer-caching-${{ github.workflow }}-
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
uses: docker/login-action@v2
with:
registry: ghcr.io
username: gardencmp
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker Build & Push
- name: Nuke Workspace
run: |
export DOCKER_TAG=ghcr.io/gardencmp/jazz-example-todo:${{github.head_ref || github.ref_name}}-${{github.sha}}-$(date +%s) ;
docker build . --file Dockerfile --tag $DOCKER_TAG;
docker push $DOCKER_TAG;
echo "DOCKER_TAG=$DOCKER_TAG" >> $GITHUB_ENV
working-directory: ./examples/todo
rm package.json yarn.lock;
- name: Yarn Build
run: |
yarn install --frozen-lockfile;
yarn build;
working-directory: ./examples/${{ matrix.example }}
- name: Docker Build & Push
uses: docker/build-push-action@v4
with:
context: ./examples/${{ matrix.example }}
push: true
tags: ghcr.io/gardencmp/${{github.event.repository.name}}-example-${{ matrix.example }}:${{github.head_ref || github.ref_name}}-${{github.sha}}-${{github.run_number}}-${{github.run_attempt}}
cache-from: type=gha
cache-to: type=gha,mode=max
deploy:
runs-on: ubuntu-latest
needs: build
strategy:
matrix:
example: ["todo", "pets"]
steps:
- uses: actions/checkout@v3
with:
submodules: true
- uses: gacts/install-nomad@v1
- name: Tailscale
uses: tailscale/github-action@v1
@@ -69,9 +82,9 @@ jobs:
export DOCKER_USER=gardencmp;
export DOCKER_PASSWORD=${{ secrets.DOCKER_PULL_PAT }};
export DOCKER_TAG=${{ env.DOCKER_TAG }};
export DOCKER_TAG=ghcr.io/gardencmp/${{github.event.repository.name}}-example-${{ matrix.example }}:${{github.head_ref || github.ref_name}}-${{github.sha}}-${{github.run_number}}-${{github.run_attempt}};
envsubst '${DOCKER_USER} ${DOCKER_PASSWORD} ${DOCKER_TAG} ${BRANCH_SUFFIX} ${BRANCH_SUBDOMAIN}' < job-template.nomad > job-instance.nomad;
cat job-instance.nomad;
NOMAD_ADDR='http://control1v2-london:4646' nomad job run job-instance.nomad;
working-directory: ./examples/todo
working-directory: ./examples/${{ matrix.example }}

282
DOCS.md
View File

@@ -2,6 +2,17 @@
----
## `Media` (namespace in `cojson`)
```typescript
export Media
```
TODO: document
TODO: doc generator not implemented yet
----
## `LocalNode` (class in `cojson`)
@@ -340,7 +351,7 @@ Creates an invite for new members to indirectly join the group, allowing them to
<summary><code>group.createMap(meta)</code> </summary>
```typescript
group.createMap<M extends CoMap<{ [key: string]: JsonValue }, null | JsonObject>>(
group.createMap<M extends CoMap<{ [key: string]: JsonValue | undefined }, null | JsonObject>>(
meta: M["meta"]
): M
```
@@ -405,7 +416,7 @@ TODO: document
## `CoMap` (class in `cojson`)
```typescript
export class CoMap<M extends { [key: string]: JsonValue }, Meta extends JsonObject | null> implements ReadableCoValue {...}
export class CoMap<M extends { [key: string]: JsonValue | undefined }, Meta extends JsonObject | null> implements ReadableCoValue {...}
```
A collaborative map with precise shape `M` and optional static metadata `Meta`
@@ -421,7 +432,7 @@ A collaborative map with precise shape `M` and optional static metadata `Meta`
<summary><code>coMap.id</code> </summary>
```typescript
coMap.id: CoID<CoMap<MapM<M>, Meta>>
coMap.id: CoID<CoMap<M, Meta>>
```
The `CoValue`'s (precisely typed) `CoID`
@@ -655,7 +666,7 @@ Lets you apply edits to a `CoValue`, inside the changer callback, which receives
## `WriteableCoMap` (class in `cojson`)
```typescript
export class WriteableCoMap<M extends { [key: string]: JsonValue }, Meta extends JsonObject | null> extends CoMap<M, Meta> implements WriteableCoValue {...}
export class WriteableCoMap<M extends { [key: string]: JsonValue | undefined }, Meta extends JsonObject | null> extends CoMap<M, Meta> implements WriteableCoValue {...}
```
A collaborative map with precise shape `M` and optional static metadata `Meta`
@@ -671,7 +682,7 @@ A collaborative map with precise shape `M` and optional static metadata `Meta`
<summary><code>writeableCoMap.id</code> (from <code>CoMap</code>) </summary>
```typescript
writeableCoMap.id: CoID<CoMap<MapM<M>, Meta>>
writeableCoMap.id: CoID<CoMap<M, Meta>>
```
The `CoValue`'s (precisely typed) `CoID`
@@ -1574,7 +1585,7 @@ TODO: document
<summary><code>coStream.items</code> (undocumented)</summary>
```typescript
coStream.items: { [key: SessionID]: T[] }
coStream.items: { [key: SessionID]: {item: T, madeAt: number}[] }
```
TODO: document
@@ -1628,6 +1639,44 @@ TODO: document
<details>
<summary><code>coStream.getLastItemsPerAccount()</code> (undocumented)</summary>
```typescript
coStream.getLastItemsPerAccount(): { [account: AccountID]: T | undefined }
```
TODO: document
</details>
<details>
<summary><code>coStream.getLastItemFrom(account)</code> (undocumented)</summary>
```typescript
coStream.getLastItemFrom(
account: AccountID
): undefined | T
```
TODO: document
</details>
<details>
<summary><code>coStream.getLastItemFromMe()</code> (undocumented)</summary>
```typescript
coStream.getLastItemFromMe(): undefined | T
```
TODO: document
</details>
<details>
<summary><code>coStream.toJSON()</code> </summary>
@@ -1755,7 +1804,7 @@ TODO: document
<summary><code>writeableCoStream.items</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
writeableCoStream.items: { [key: SessionID]: T[] }
writeableCoStream.items: { [key: SessionID]: {item: T, madeAt: number}[] }
```
TODO: document
@@ -1826,6 +1875,44 @@ TODO: document
<details>
<summary><code>writeableCoStream.getLastItemsPerAccount()</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
writeableCoStream.getLastItemsPerAccount(): { [account: AccountID]: T | undefined }
```
TODO: document
</details>
<details>
<summary><code>writeableCoStream.getLastItemFrom(account)</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
writeableCoStream.getLastItemFrom(
account: AccountID
): undefined | T
```
TODO: document
</details>
<details>
<summary><code>writeableCoStream.getLastItemFromMe()</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
writeableCoStream.getLastItemFromMe(): undefined | T
```
TODO: document
</details>
<details>
<summary><code>writeableCoStream.toJSON()</code> (from <code>CoStream</code>) </summary>
@@ -1933,7 +2020,7 @@ TODO: document
<summary><code>binaryCoStream.items</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
binaryCoStream.items: { [key: SessionID]: T[] }
binaryCoStream.items: { [key: SessionID]: {item: T, madeAt: number}[] }
```
TODO: document
@@ -1974,10 +2061,12 @@ The `Group` this `CoValue` belongs to (determining permissions)
### Methods
<details>
<summary><code>binaryCoStream.getBinaryChunks()</code> (undocumented)</summary>
<summary><code>binaryCoStream.getBinaryChunks(allowUnfinished)</code> (undocumented)</summary>
```typescript
binaryCoStream.getBinaryChunks(): undefined | BinaryChunkInfo & {chunks: Uint8Array[], finished: boolean}
binaryCoStream.getBinaryChunks(
allowUnfinished: boolean
): undefined | BinaryChunkInfo & {chunks: Uint8Array[], finished: boolean}
```
TODO: document
@@ -2019,6 +2108,44 @@ TODO: document
<details>
<summary><code>binaryCoStream.getLastItemsPerAccount()</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
binaryCoStream.getLastItemsPerAccount(): { [account: AccountID]: T | undefined }
```
TODO: document
</details>
<details>
<summary><code>binaryCoStream.getLastItemFrom(account)</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
binaryCoStream.getLastItemFrom(
account: AccountID
): undefined | BinaryStreamItem
```
TODO: document
</details>
<details>
<summary><code>binaryCoStream.getLastItemFromMe()</code> (from <code>CoStream</code>) (undocumented)</summary>
```typescript
binaryCoStream.getLastItemFromMe(): undefined | BinaryStreamItem
```
TODO: document
</details>
<details>
<summary><code>binaryCoStream.toJSON()</code> (from <code>CoStream</code>) </summary>
@@ -2126,7 +2253,7 @@ TODO: document
<summary><code>writeableBinaryCoStream.items</code> (from <code>BinaryCoStream</code>) (undocumented)</summary>
```typescript
writeableBinaryCoStream.items: { [key: SessionID]: T[] }
writeableBinaryCoStream.items: { [key: SessionID]: {item: T, madeAt: number}[] }
```
TODO: document
@@ -2215,10 +2342,12 @@ TODO: document
<details>
<summary><code>writeableBinaryCoStream.getBinaryChunks()</code> (from <code>BinaryCoStream</code>) (undocumented)</summary>
<summary><code>writeableBinaryCoStream.getBinaryChunks(allowUnfinished)</code> (from <code>BinaryCoStream</code>) (undocumented)</summary>
```typescript
writeableBinaryCoStream.getBinaryChunks(): undefined | BinaryChunkInfo & {chunks: Uint8Array[], finished: boolean}
writeableBinaryCoStream.getBinaryChunks(
allowUnfinished: boolean
): undefined | BinaryChunkInfo & {chunks: Uint8Array[], finished: boolean}
```
TODO: document
@@ -2240,6 +2369,44 @@ TODO: document
<details>
<summary><code>writeableBinaryCoStream.getLastItemsPerAccount()</code> (from <code>BinaryCoStream</code>) (undocumented)</summary>
```typescript
writeableBinaryCoStream.getLastItemsPerAccount(): { [account: AccountID]: T | undefined }
```
TODO: document
</details>
<details>
<summary><code>writeableBinaryCoStream.getLastItemFrom(account)</code> (from <code>BinaryCoStream</code>) (undocumented)</summary>
```typescript
writeableBinaryCoStream.getLastItemFrom(
account: AccountID
): undefined | BinaryStreamItem
```
TODO: document
</details>
<details>
<summary><code>writeableBinaryCoStream.getLastItemFromMe()</code> (from <code>BinaryCoStream</code>) (undocumented)</summary>
```typescript
writeableBinaryCoStream.getLastItemFromMe(): undefined | BinaryStreamItem
```
TODO: document
</details>
<details>
<summary><code>writeableBinaryCoStream.toJSON()</code> (from <code>BinaryCoStream</code>) </summary>
@@ -2369,7 +2536,7 @@ TODO: document
<summary><code>coValueCore._decryptionCache</code> (undocumented)</summary>
```typescript
coValueCore._decryptionCache: { [key: Encrypted<JsonValue[], JsonValue>]: JsonValue[] | undefined }
coValueCore._decryptionCache: { [key: Encrypted<JsonValue[], JsonValue>]: Stringified<JsonValue[]> | undefined }
```
TODO: document
@@ -2473,6 +2640,41 @@ TODO: document
<details>
<summary><code>coValueCore.tryAddTransactionsAsync(sessionID, newTransactions, givenExpectedNewHash, newSignature)</code> (undocumented)</summary>
```typescript
coValueCore.tryAddTransactionsAsync(
sessionID: SessionID,
newTransactions: Transaction[],
givenExpectedNewHash: undefined | TEMPLATE_LITERAL,
newSignature: TEMPLATE_LITERAL
): Promise<boolean>
```
TODO: document
</details>
<details>
<summary><code>coValueCore.doAddTransactions(sessionID, newTransactions, newSignature, expectedNewHash, newStreamingHash)</code> (undocumented)</summary>
```typescript
coValueCore.doAddTransactions(
sessionID: SessionID,
newTransactions: Transaction[],
newSignature: TEMPLATE_LITERAL,
expectedNewHash: TEMPLATE_LITERAL,
newStreamingHash: StreamingHash
): void
```
TODO: document
</details>
<details>
<summary><code>coValueCore.subscribe(listener)</code> (undocumented)</summary>
@@ -2502,6 +2704,21 @@ TODO: document
<details>
<summary><code>coValueCore.expectedNewHashAfterAsync(sessionID, newTransactions)</code> (undocumented)</summary>
```typescript
coValueCore.expectedNewHashAfterAsync(
sessionID: SessionID,
newTransactions: Transaction[]
): Promise<{expectedNewHash: TEMPLATE_LITERAL, newStreamingHash: StreamingHash}>
```
TODO: document
</details>
<details>
<summary><code>coValueCore.makeTransaction(changes, privacy)</code> (undocumented)</summary>
@@ -2599,7 +2816,7 @@ TODO: document
```typescript
coValueCore.newContentSince(
knownState: undefined | CoValueKnownState
): undefined | NewContentMessage
): undefined | NewContentMessage[]
```
TODO: document
@@ -2929,7 +3146,7 @@ TODO: doc generator not implemented yet
## `CoValueImpl` (type alias in `cojson`)
```typescript
export type CoValueImpl = CoMap<{ [key: string]: JsonValue }, JsonObject | null> | CoList<JsonValue, JsonObject | null> | CoStream<JsonValue, JsonObject | null> | BinaryCoStream<BinaryCoStreamMeta> | Static<JsonObject>
export type CoValueImpl = CoMap<{ [key: string]: JsonValue | undefined }, JsonObject | null> | CoList<JsonValue, JsonObject | null> | CoStream<JsonValue, JsonObject | null> | BinaryCoStream<BinaryCoStreamMeta> | Static<JsonObject>
```
TODO: document
@@ -3045,6 +3262,28 @@ TODO: document
TODO: doc generator not implemented yet
----
## `cojsonReady` (variabl in `cojson`)
```typescript
export cojsonReady
```
TODO: document
TODO: doc generator not implemented yet
----
## `MAX_RECOMMENDED_TX_SIZE` (variabl in `cojson`)
```typescript
export MAX_RECOMMENDED_TX_SIZE
```
TODO: document
TODO: doc generator not implemented yet
# jazz-react
@@ -3114,17 +3353,6 @@ TODO: doc generator not implemented yet
----
## `createBinaryStreamHandler(onCreated, inGroup, meta?)` (function in `jazz-react`)
```typescript
export function createBinaryStreamHandler(onCreated: (createdStream: C) => void, inGroup: Group, meta: C["meta"]): (event: ChangeEvent) => void
```
TODO: document
TODO: doc generator not implemented yet
----
## `createInviteLink(value, role, {baseURL?})` (function in `jazz-react`)
```typescript

View File

@@ -1,14 +1,14 @@
# Jazz Todo List Example
# Jazz Rate-My-Pet List Example
Live version: https://example-todo.jazz.tools
Live version: https://example-pets.jazz.tools
## Installing & running the example locally
Start by checking out just the example app to a folder:
```bash
npx degit gardencmp/jazz/examples/todo jazz-example-todo
cd jazz-example-todo
npx degit gardencmp/jazz/examples/pets jazz-example-pets
cd jazz-example-pets
```
(This ensures that you have the example app without git history or our multi-package monorepo)
@@ -27,31 +27,17 @@ npm run dev
## Structure
- [`src/basicComponents`](./src/basicComponents) contains simple components to build the UI, unrelated to Jazz (powered by [shadcn/ui](https://ui.shadcn.com))
- [`src/components`](./src/components/) contains helper components that do contain Jazz-specific logic, but are not super relevant to understand the basics of Jazz and CoJSON
- [`src/0_main.tsx`](./src/0_main.tsx), [`src/1_types.ts`](./src/1_types.ts), [`src/2_App.tsx`](./src/2_App.tsx), [`src/3_TodoTable.tsx`](./src/3_TodoTable.tsx), [`src/router.ts`](./src/router.ts) - the main files for this example, see the walkthrough below
TODO
## Walkthrough
### Main parts
- The top-level provider `<WithJazz/>`: [`src/0_main.tsx`](./src/0_main.tsx)
- Defining the data model with CoJSON: [`src/1_types.ts`](./src/1_types.ts)
- Creating todo projects & routing in `<App/>`: [`src/2_App.tsx`](./src/2_App.tsx)
- Reactively rendering a todo project as a table, adding and editing tasks: [`src/3_TodoTable.tsx`](./src/3_TodoTable.tsx)
TODO
### Helpers
- Getting user profiles in `<NameBadge/>`: [`src/components/NameBadge.tsx`](./src/components/NameBadge.tsx)
- (not yet commented) Creating invite links/QR codes with `<InviteButton/>`: [`src/components/InviteButton.tsx`](./src/components/InviteButton.tsx)
- (not yet commented) `location.hash`-based routing and accepting invite links with `useSimpleHashRouterThatAcceptsInvites()` in [`src/router.ts`](./src/router.ts)
This is the whole Todo List app!
TODO
## Questions / problems / feedback

View File

@@ -1,4 +1,4 @@
job "example-todo$BRANCH_SUFFIX" {
job "example-pets$BRANCH_SUFFIX" {
region = "global"
datacenters = ["*"]
@@ -41,7 +41,7 @@ job "example-todo$BRANCH_SUFFIX" {
service {
tags = ["public"]
name = "example-todo$BRANCH_SUFFIX"
name = "example-pets$BRANCH_SUFFIX"
port = "http"
provider = "consul"
}

View File

@@ -1,7 +1,7 @@
{
"name": "jazz-example-pets",
"private": true,
"version": "0.0.2",
"version": "0.0.7",
"type": "module",
"scripts": {
"dev": "vite",
@@ -16,16 +16,16 @@
"@types/qrcode": "^1.5.1",
"class-variance-authority": "^0.7.0",
"clsx": "^2.0.0",
"jazz-react": "^0.1.13",
"jazz-react-auth-local": "^0.1.13",
"jazz-react": "^0.2.3",
"jazz-react-auth-local": "^0.2.3",
"jazz-react-media-images": "^0.2.3",
"lucide-react": "^0.274.0",
"qrcode": "^1.5.3",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"tailwind-merge": "^1.14.0",
"tailwindcss-animate": "^1.0.7",
"uniqolor": "^1.1.0",
"use-debounce": "^9.0.4"
"uniqolor": "^1.1.0"
},
"devDependencies": {
"@types/react": "^18.2.15",

View File

@@ -1,4 +1,4 @@
import { CoMap, CoID, BinaryCoStream, CoStream } from "cojson";
import { CoMap, CoID, CoStream, Media } from "cojson";
/** Walkthrough: Defining the data model with CoJSON
*
@@ -9,18 +9,20 @@ import { CoMap, CoID, BinaryCoStream, CoStream } from "cojson";
export type PetPost = CoMap<{
name: string;
image: CoID<BinaryCoStream>;
image: CoID<Media.ImageDefinition>;
reactions: CoID<PetReactions>;
}>;
export type ReactionType =
| "aww"
| "love"
| "haha"
| "wow"
| "tiny"
| "chonkers"
| "good";
export const REACTION_TYPES = [
"aww",
"love",
"haha",
"wow",
"tiny",
"chonkers",
] as const;
export type ReactionType = (typeof REACTION_TYPES)[number];
export type PetReactions = CoStream<ReactionType>;

View File

@@ -1,5 +1,3 @@
import { useCallback } from "react";
import { useJazz } from "jazz-react";
import { PetPost } from "./1_types";
@@ -7,8 +5,8 @@ import { PetPost } from "./1_types";
import { Button } from "./basicComponents";
import { useSimpleHashRouterThatAcceptsInvites } from "./router";
import { PetPostUI } from "./4_PetPostUI";
import { CreatePetPostForm } from "./4_CreatePetPostForm";
import { RatePetPostUI } from "./4_RatePetPostUI";
import { CreatePetPostForm } from "./3_CreatePetPostForm";
/** Walkthrough: Creating pet posts & routing in `<App/>`
*
@@ -30,7 +28,7 @@ export default function App() {
return (
<div className="flex flex-col h-full items-center justify-start gap-10 pt-10 pb-10 px-5">
{currentPetPostID ? (
<PetPostUI petPostID={currentPetPostID} />
<RatePetPostUI petPostID={currentPetPostID} />
) : (
<CreatePetPostForm onCreate={navigateToPetPostID} />
)}
@@ -47,4 +45,4 @@ export default function App() {
);
}
/** Walkthrough: continue with ./3_TodoTable.tsx */
/** Walkthrough: continue with ./3_CreatePetPostForm.tsx */

View File

@@ -0,0 +1,103 @@
import { ChangeEvent, useCallback, useState } from "react";
import { CoID } from "cojson";
import { useJazz, useTelepathicState } from "jazz-react";
import { createImage } from "jazz-browser-media-images";
import { PetPost, PetReactions } from "./1_types";
import { Input, Button } from "./basicComponents";
import { useLoadImage } from "jazz-react-media-images";
/** Walkthrough: TODO
*/
export function CreatePetPostForm({
onCreate,
}: {
onCreate: (id: CoID<PetPost>) => void;
}) {
const { localNode } = useJazz();
const [newPostId, setNewPostId] = useState<CoID<PetPost> | undefined>(
undefined
);
const newPetPost = useTelepathicState(newPostId);
const onChangeName = useCallback(
(name: string) => {
let petPost = newPetPost;
if (!petPost) {
const petPostGroup = localNode.createGroup();
petPost = petPostGroup.createMap<PetPost>();
const petReactions = petPostGroup.createStream<PetReactions>();
petPost = petPost.edit((petPost) => {
petPost.set("reactions", petReactions.id);
});
setNewPostId(petPost.id);
}
petPost.edit((petPost) => {
petPost.set("name", name);
});
},
[localNode, newPetPost]
);
const onImageSelected = useCallback(
async (event: ChangeEvent<HTMLInputElement>) => {
if (!newPetPost || !event.target.files) return;
const imageDefinition = await createImage(
event.target.files[0],
newPetPost.group
);
newPetPost.edit((petPost) => {
petPost.set("image", imageDefinition.id);
});
},
[newPetPost]
);
const petImage = useLoadImage(newPetPost?.get("image"));
return (
<div className="flex flex-col gap-10">
<p>Share your pet with friends!</p>
<Input
type="text"
placeholder="Pet Name"
className="text-3xl py-6"
onChange={(event) => onChangeName(event.target.value)}
value={newPetPost?.get("name") || ""}
/>
{petImage ? (
<img
className="w-80 max-w-full rounded"
src={petImage.highestResSrc || petImage.placeholderDataURL}
/>
) : (
<Input
type="file"
disabled={!newPetPost?.get("name")}
onChange={onImageSelected}
/>
)}
{newPetPost?.get("name") && newPetPost?.get("image") && (
<Button
onClick={() => {
onCreate(newPetPost.id);
}}
>
Submit Post
</Button>
)}
</div>
);
}

View File

@@ -1,103 +0,0 @@
import { useCallback, useState } from "react";
import { BinaryCoStream, CoID } from "cojson";
import {
useBinaryStream,
useJazz,
useTelepathicState,
} from "jazz-react";
import { PetPost, PetReactions, ReactionType } from "./1_types";
import {
Input,
Button,
} from "./basicComponents";
import { InviteButton } from "./components/InviteButton";
import { NameBadge } from "./components/NameBadge";
import { useDebouncedCallback } from "use-debounce";
import { createBinaryStreamHandler } from "jazz-react";
/** Walkthrough: TODO
*/
export function CreatePetPostForm({
onCreate,
}: {
onCreate: (id: CoID<PetPost>) => void;
}) {
const { localNode } = useJazz();
const [creatingPostId, setCreatingPostId] = useState<
CoID<PetPost> | undefined
>(undefined);
const creatingPetPost = useTelepathicState(creatingPostId);
const onChangeName = useDebouncedCallback((name: string) => {
let petPost = creatingPetPost;
if (!petPost) {
const petPostGroup = localNode.createGroup();
petPost = petPostGroup.createMap<PetPost>();
const reactions = petPostGroup.createStream<PetReactions>();
petPost = petPost.edit((petPost) => {
petPost.set("reactions", reactions.id);
});
setCreatingPostId(petPost.id);
}
petPost.edit((petPost) => {
petPost.set("name", name);
});
}, 200);
const onImageCreated = useCallback(
(image: BinaryCoStream) => {
if (!creatingPetPost) throw new Error("Never get here");
creatingPetPost.edit((petPost) => {
petPost.set("image", image.id);
});
},
[creatingPetPost]
);
const image = useBinaryStream(creatingPetPost?.get("image"));
return (
<div>
<Input
type="text"
placeholder="Pet Name"
onChange={event => onChangeName(event.target.value)}
value={creatingPetPost?.get("name")}
/>
{image ? (
<img src={image.blobURL} />
) : (
creatingPetPost && (
<Input
type="file"
onChange={createBinaryStreamHandler(
onImageCreated,
creatingPetPost.group
)}
/>
)
)}
{creatingPetPost?.get("name") && creatingPetPost?.get("image") && (
<Button
onClick={() => {
onCreate(creatingPetPost.id);
}}
>
Submit Post
</Button>
)}
</div>
);
}

View File

@@ -1,18 +0,0 @@
import { useCallback } from "react";
import { CoID } from "cojson";
import { useTelepathicState } from "jazz-react";
import { PetPost } from "./1_types";
import { InviteButton } from "./components/InviteButton";
import { NameBadge } from "./components/NameBadge";
/** Walkthrough: TODO
*/
export function PetPostUI({ petPostID }: { petPostID: CoID<PetPost> }) {
return (<div>TODO</div>);
}

View File

@@ -0,0 +1,103 @@
import { AccountID, CoID } from "cojson";
import { useTelepathicState } from "jazz-react";
import { PetPost, PetReactions, ReactionType, REACTION_TYPES } from "./1_types";
import { ShareButton } from "./components/ShareButton";
import { NameBadge } from "./components/NameBadge";
import { Button } from "./basicComponents";
import { useLoadImage } from "jazz-react-media-images";
/** Walkthrough: TODO
*/
const reactionEmojiMap: { [reaction in ReactionType]: string } = {
aww: "😍",
love: "❤️",
haha: "😂",
wow: "😮",
tiny: "🐥",
chonkers: "🐘",
};
export function RatePetPostUI({ petPostID }: { petPostID: CoID<PetPost> }) {
const petPost = useTelepathicState(petPostID);
const petReactions = useTelepathicState(petPost?.get("reactions"));
const petImage = useLoadImage(petPost?.get("image"));
return (
<div className="flex flex-col gap-8">
<div className="flex justify-between">
<h1 className="text-3xl font-bold">{petPost?.get("name")}</h1>
<ShareButton petPost={petPost} />
</div>
{petImage && (
<img
className="w-80 max-w-full rounded"
src={petImage.highestResSrc || petImage.placeholderDataURL}
/>
)}
<div className="flex justify-between max-w-xs flex-wrap">
{REACTION_TYPES.map((reactionType) => (
<Button
key={reactionType}
variant={
petReactions?.getLastItemFromMe() === reactionType
? "default"
: "outline"
}
onClick={() => {
petReactions?.edit((reactions) => {
reactions.push(reactionType);
});
}}
title={`React with ${reactionType}`}
className="text-2xl px-2"
>
{reactionEmojiMap[reactionType]}
</Button>
))}
</div>
{petPost?.group.myRole() === "admin" && petReactions && (
<ReactionOverview petReactions={petReactions} />
)}
</div>
);
}
function ReactionOverview({ petReactions }: { petReactions: PetReactions }) {
return (
<div>
<h2>Reactions</h2>
<div className="flex flex-col gap-1">
{REACTION_TYPES.map((reactionType) => {
const accountsWithThisReaction = Object.entries(
petReactions.getLastItemsPerAccount()
).flatMap(([accountID, reaction]) =>
reaction === reactionType ? [accountID] : []
);
if (accountsWithThisReaction.length === 0) return null;
return (
<div
className="flex gap-2 items-center"
key={reactionType}
>
{reactionEmojiMap[reactionType]}{" "}
{accountsWithThisReaction.map((accountID) => (
<NameBadge
key={accountID}
accountID={accountID as AccountID}
/>
))}
</div>
);
})}
</div>
</div>
);
}

View File

@@ -1,7 +1,12 @@
export function TitleAndLogo({name}: {name: string}) {
return <>
<div className="flex items-center gap-2 justify-center mt-5">
import { Toaster } from ".";
export function TitleAndLogo({ name }: { name: string }) {
return (
<>
<div className="flex items-center gap-2 justify-center mt-5">
<img src="jazz-logo.png" className="h-5" /> {name}
</div>
</>
}
<Toaster />
</>
);
}

View File

@@ -1,4 +1,7 @@
export { Button } from "./ui/button";
export { Input } from "./ui/input";
export { Toaster } from "./ui/toaster";
export { useToast } from "./ui/use-toast";
export { Skeleton } from "./ui/skeleton";
export { TitleAndLogo } from "./TitleAndLogo";
export { ThemeProvider } from "./themeProvider";

View File

@@ -0,0 +1,15 @@
import { cn } from "@/basicComponents/lib/utils"
function Skeleton({
className,
...props
}: React.HTMLAttributes<HTMLDivElement>) {
return (
<div
className={cn("animate-pulse rounded-md bg-muted", className)}
{...props}
/>
)
}
export { Skeleton }

View File

@@ -0,0 +1,127 @@
import * as React from "react"
import * as ToastPrimitives from "@radix-ui/react-toast"
import { cva, type VariantProps } from "class-variance-authority"
import { X } from "lucide-react"
import { cn } from "@/basicComponents/lib/utils"
const ToastProvider = ToastPrimitives.Provider
const ToastViewport = React.forwardRef<
React.ElementRef<typeof ToastPrimitives.Viewport>,
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Viewport>
>(({ className, ...props }, ref) => (
<ToastPrimitives.Viewport
ref={ref}
className={cn(
"fixed top-0 z-[100] flex max-h-screen w-full flex-col-reverse p-4 sm:bottom-0 sm:right-0 sm:top-auto sm:flex-col md:max-w-[420px]",
className
)}
{...props}
/>
))
ToastViewport.displayName = ToastPrimitives.Viewport.displayName
const toastVariants = cva(
"group pointer-events-auto relative flex w-full items-center justify-between space-x-4 overflow-hidden rounded-md border p-6 pr-8 shadow-lg transition-all data-[swipe=cancel]:translate-x-0 data-[swipe=end]:translate-x-[var(--radix-toast-swipe-end-x)] data-[swipe=move]:translate-x-[var(--radix-toast-swipe-move-x)] data-[swipe=move]:transition-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[swipe=end]:animate-out data-[state=closed]:fade-out-80 data-[state=closed]:slide-out-to-right-full data-[state=open]:slide-in-from-top-full data-[state=open]:sm:slide-in-from-bottom-full",
{
variants: {
variant: {
default: "border bg-background text-foreground",
destructive:
"destructive group border-destructive bg-destructive text-destructive-foreground",
},
},
defaultVariants: {
variant: "default",
},
}
)
const Toast = React.forwardRef<
React.ElementRef<typeof ToastPrimitives.Root>,
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Root> &
VariantProps<typeof toastVariants>
>(({ className, variant, ...props }, ref) => {
return (
<ToastPrimitives.Root
ref={ref}
className={cn(toastVariants({ variant }), className)}
{...props}
/>
)
})
Toast.displayName = ToastPrimitives.Root.displayName
const ToastAction = React.forwardRef<
React.ElementRef<typeof ToastPrimitives.Action>,
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Action>
>(({ className, ...props }, ref) => (
<ToastPrimitives.Action
ref={ref}
className={cn(
"inline-flex h-8 shrink-0 items-center justify-center rounded-md border bg-transparent px-3 text-sm font-medium ring-offset-background transition-colors hover:bg-secondary focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 group-[.destructive]:border-muted/40 group-[.destructive]:hover:border-destructive/30 group-[.destructive]:hover:bg-destructive group-[.destructive]:hover:text-destructive-foreground group-[.destructive]:focus:ring-destructive",
className
)}
{...props}
/>
))
ToastAction.displayName = ToastPrimitives.Action.displayName
const ToastClose = React.forwardRef<
React.ElementRef<typeof ToastPrimitives.Close>,
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Close>
>(({ className, ...props }, ref) => (
<ToastPrimitives.Close
ref={ref}
className={cn(
"absolute right-2 top-2 rounded-md p-1 text-foreground/50 opacity-0 transition-opacity hover:text-foreground focus:opacity-100 focus:outline-none focus:ring-2 group-hover:opacity-100 group-[.destructive]:text-red-300 group-[.destructive]:hover:text-red-50 group-[.destructive]:focus:ring-red-400 group-[.destructive]:focus:ring-offset-red-600",
className
)}
toast-close=""
{...props}
>
<X className="h-4 w-4" />
</ToastPrimitives.Close>
))
ToastClose.displayName = ToastPrimitives.Close.displayName
const ToastTitle = React.forwardRef<
React.ElementRef<typeof ToastPrimitives.Title>,
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Title>
>(({ className, ...props }, ref) => (
<ToastPrimitives.Title
ref={ref}
className={cn("text-sm font-semibold", className)}
{...props}
/>
))
ToastTitle.displayName = ToastPrimitives.Title.displayName
const ToastDescription = React.forwardRef<
React.ElementRef<typeof ToastPrimitives.Description>,
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Description>
>(({ className, ...props }, ref) => (
<ToastPrimitives.Description
ref={ref}
className={cn("text-sm opacity-90", className)}
{...props}
/>
))
ToastDescription.displayName = ToastPrimitives.Description.displayName
type ToastProps = React.ComponentPropsWithoutRef<typeof Toast>
type ToastActionElement = React.ReactElement<typeof ToastAction>
export {
type ToastProps,
type ToastActionElement,
ToastProvider,
ToastViewport,
Toast,
ToastTitle,
ToastDescription,
ToastClose,
ToastAction,
}

View File

@@ -0,0 +1,33 @@
import {
Toast,
ToastClose,
ToastDescription,
ToastProvider,
ToastTitle,
ToastViewport,
} from "@/basicComponents/ui/toast"
import { useToast } from "@/basicComponents/ui/use-toast"
export function Toaster() {
const { toasts } = useToast()
return (
<ToastProvider>
{toasts.map(function ({ id, title, description, action, ...props }) {
return (
<Toast key={id} {...props}>
<div className="grid gap-1">
{title && <ToastTitle>{title}</ToastTitle>}
{description && (
<ToastDescription>{description}</ToastDescription>
)}
</div>
{action}
<ToastClose />
</Toast>
)
})}
<ToastViewport />
</ToastProvider>
)
}

View File

@@ -0,0 +1,192 @@
// Inspired by react-hot-toast library
import * as React from "react"
import type {
ToastActionElement,
ToastProps,
} from "@/basicComponents/ui/toast"
const TOAST_LIMIT = 1
const TOAST_REMOVE_DELAY = 1000000
type ToasterToast = ToastProps & {
id: string
title?: React.ReactNode
description?: React.ReactNode
action?: ToastActionElement
}
const actionTypes = {
ADD_TOAST: "ADD_TOAST",
UPDATE_TOAST: "UPDATE_TOAST",
DISMISS_TOAST: "DISMISS_TOAST",
REMOVE_TOAST: "REMOVE_TOAST",
} as const
let count = 0
function genId() {
count = (count + 1) % Number.MAX_VALUE
return count.toString()
}
type ActionType = typeof actionTypes
type Action =
| {
type: ActionType["ADD_TOAST"]
toast: ToasterToast
}
| {
type: ActionType["UPDATE_TOAST"]
toast: Partial<ToasterToast>
}
| {
type: ActionType["DISMISS_TOAST"]
toastId?: ToasterToast["id"]
}
| {
type: ActionType["REMOVE_TOAST"]
toastId?: ToasterToast["id"]
}
interface State {
toasts: ToasterToast[]
}
const toastTimeouts = new Map<string, ReturnType<typeof setTimeout>>()
const addToRemoveQueue = (toastId: string) => {
if (toastTimeouts.has(toastId)) {
return
}
const timeout = setTimeout(() => {
toastTimeouts.delete(toastId)
dispatch({
type: "REMOVE_TOAST",
toastId: toastId,
})
}, TOAST_REMOVE_DELAY)
toastTimeouts.set(toastId, timeout)
}
export const reducer = (state: State, action: Action): State => {
switch (action.type) {
case "ADD_TOAST":
return {
...state,
toasts: [action.toast, ...state.toasts].slice(0, TOAST_LIMIT),
}
case "UPDATE_TOAST":
return {
...state,
toasts: state.toasts.map((t) =>
t.id === action.toast.id ? { ...t, ...action.toast } : t
),
}
case "DISMISS_TOAST": {
const { toastId } = action
// ! Side effects ! - This could be extracted into a dismissToast() action,
// but I'll keep it here for simplicity
if (toastId) {
addToRemoveQueue(toastId)
} else {
state.toasts.forEach((toast) => {
addToRemoveQueue(toast.id)
})
}
return {
...state,
toasts: state.toasts.map((t) =>
t.id === toastId || toastId === undefined
? {
...t,
open: false,
}
: t
),
}
}
case "REMOVE_TOAST":
if (action.toastId === undefined) {
return {
...state,
toasts: [],
}
}
return {
...state,
toasts: state.toasts.filter((t) => t.id !== action.toastId),
}
}
}
const listeners: Array<(state: State) => void> = []
let memoryState: State = { toasts: [] }
function dispatch(action: Action) {
memoryState = reducer(memoryState, action)
listeners.forEach((listener) => {
listener(memoryState)
})
}
type Toast = Omit<ToasterToast, "id">
function toast({ ...props }: Toast) {
const id = genId()
const update = (props: ToasterToast) =>
dispatch({
type: "UPDATE_TOAST",
toast: { ...props, id },
})
const dismiss = () => dispatch({ type: "DISMISS_TOAST", toastId: id })
dispatch({
type: "ADD_TOAST",
toast: {
...props,
id,
open: true,
onOpenChange: (open) => {
if (!open) dismiss()
},
},
})
return {
id: id,
dismiss,
update,
}
}
function useToast() {
const [state, setState] = React.useState<State>(memoryState)
React.useEffect(() => {
listeners.push(setState)
return () => {
const index = listeners.indexOf(setState)
if (index > -1) {
listeners.splice(index, 1)
}
}
}, [state])
return {
...state,
toast,
dismiss: (toastId?: string) => dispatch({ type: "DISMISS_TOAST", toastId }),
}
}
export { useToast, toast }

View File

@@ -1,27 +1,27 @@
import { useState } from "react";
import { TodoProject } from "../1_types";
import { PetPost } from "../1_types";
import { createInviteLink } from "jazz-react";
import QRCode from "qrcode";
import { useToast, Button } from "../basicComponents";
export function InviteButton({ list }: { list?: TodoProject }) {
export function ShareButton({ petPost }: { petPost?: PetPost }) {
const [existingInviteLink, setExistingInviteLink] = useState<string>();
const { toast } = useToast();
return (
list?.group.myRole() === "admin" && (
petPost?.group.myRole() === "admin" && (
<Button
size="sm"
className="py-0"
disabled={!list}
disabled={!petPost}
variant="outline"
onClick={async () => {
let inviteLink = existingInviteLink;
if (list && !inviteLink) {
inviteLink = createInviteLink(list, "writer");
if (petPost && !inviteLink) {
inviteLink = createInviteLink(petPost, "writer");
setExistingInviteLink(inviteLink);
}
if (inviteLink) {
@@ -39,7 +39,7 @@ export function InviteButton({ list }: { list?: TodoProject }) {
}
}}
>
Invite
Share
</Button>
)
);

View File

@@ -1,7 +1,7 @@
{
"name": "jazz-example-todo",
"private": true,
"version": "0.0.27",
"version": "0.0.32",
"type": "module",
"scripts": {
"dev": "vite",
@@ -16,8 +16,8 @@
"@types/qrcode": "^1.5.1",
"class-variance-authority": "^0.7.0",
"clsx": "^2.0.0",
"jazz-react": "^0.1.13",
"jazz-react-auth-local": "^0.1.13",
"jazz-react": "^0.2.3",
"jazz-react-auth-local": "^0.2.3",
"lucide-react": "^0.274.0",
"qrcode": "^1.5.3",
"react": "^18.2.0",

View File

@@ -4,7 +4,7 @@
"types": "src/index.ts",
"type": "module",
"license": "MIT",
"version": "0.1.12",
"version": "0.2.4",
"devDependencies": {
"@types/jest": "^29.5.3",
"@types/ws": "^8.5.5",
@@ -16,8 +16,8 @@
"typescript": "5.0.2"
},
"dependencies": {
"cojson": "^0.1.11",
"cojson-storage-sqlite": "^0.1.9",
"cojson": "^0.2.2",
"cojson-storage-sqlite": "^0.2.4",
"ws": "^8.13.0"
},
"scripts": {

View File

@@ -1,13 +1,13 @@
{
"name": "cojson-storage-sqlite",
"type": "module",
"version": "0.1.9",
"version": "0.2.4",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"better-sqlite3": "^8.5.2",
"cojson": "^0.1.11",
"cojson": "^0.2.2",
"typescript": "^5.1.6"
},
"scripts": {

View File

@@ -60,7 +60,7 @@ export class SQLiteStorage {
done = result.done;
if (result.value) {
this.handleSyncMessage(result.value);
await this.handleSyncMessage(result.value);
}
}
})();
@@ -98,41 +98,67 @@ export class SQLiteStorage {
const db = Database(filename);
db.pragma("journal_mode = WAL");
db.prepare(
`CREATE TABLE IF NOT EXISTS transactions (
ses INTEGER,
idx INTEGER,
tx TEXT NOT NULL ,
PRIMARY KEY (ses, idx)
) WITHOUT ROWID;`
).run();
const oldVersion = (db.pragma("user_version") as [{user_version: number}])[0].user_version as number;
db.prepare(
`CREATE TABLE IF NOT EXISTS sessions (
rowID INTEGER PRIMARY KEY,
coValue INTEGER NOT NULL,
sessionID TEXT NOT NULL,
lastIdx INTEGER,
lastSignature TEXT,
UNIQUE (sessionID, coValue)
);`
).run();
console.log("DB version", oldVersion);
db.prepare(
`CREATE INDEX IF NOT EXISTS sessionsByCoValue ON sessions (coValue);`
).run();
if (oldVersion === 0) {
console.log("Migration 0 -> 1: Basic schema");
db.prepare(
`CREATE TABLE IF NOT EXISTS transactions (
ses INTEGER,
idx INTEGER,
tx TEXT NOT NULL ,
PRIMARY KEY (ses, idx)
) WITHOUT ROWID;`
).run();
db.prepare(
`CREATE TABLE IF NOT EXISTS coValues (
rowID INTEGER PRIMARY KEY,
id TEXT NOT NULL UNIQUE,
header TEXT NOT NULL UNIQUE
);`
).run();
db.prepare(
`CREATE TABLE IF NOT EXISTS sessions (
rowID INTEGER PRIMARY KEY,
coValue INTEGER NOT NULL,
sessionID TEXT NOT NULL,
lastIdx INTEGER,
lastSignature TEXT,
UNIQUE (sessionID, coValue)
);`
).run();
db.prepare(
`CREATE INDEX IF NOT EXISTS coValuesByID ON coValues (id);`
).run();
db.prepare(
`CREATE INDEX IF NOT EXISTS sessionsByCoValue ON sessions (coValue);`
).run();
db.prepare(
`CREATE TABLE IF NOT EXISTS coValues (
rowID INTEGER PRIMARY KEY,
id TEXT NOT NULL UNIQUE,
header TEXT NOT NULL UNIQUE
);`
).run();
db.prepare(
`CREATE INDEX IF NOT EXISTS coValuesByID ON coValues (id);`
).run();
db.pragma("user_version = 1");
console.log("Migration 0 -> 1: Basic schema - done");
}
if (oldVersion <= 1) {
// fix embarrassing off-by-one error for transaction indices
console.log("Migration 1 -> 2: Fix off-by-one error for transaction indices");
const txs = db.prepare(`SELECT * FROM transactions`).all() as TransactionRow[];
for (const tx of txs) {
db.prepare(`DELETE FROM transactions WHERE ses = ? AND idx = ?`).run(tx.ses, tx.idx);
tx.idx -= 1;
db.prepare(`INSERT INTO transactions (ses, idx, tx) VALUES (?, ?, ?)`).run(tx.ses, tx.idx, tx.tx);
}
db.pragma("user_version = 2");
console.log("Migration 1 -> 2: Fix off-by-one error for transaction indices - done");
}
return new SQLiteStorage(db, fromLocalNode, toLocalNode);
}
@@ -198,7 +224,7 @@ export class SQLiteStorage {
const newTxInSession = this.db
.prepare<[number, number]>(
`SELECT * FROM transactions WHERE ses = ? AND idx > ?`
`SELECT * FROM transactions WHERE ses = ? AND idx >= ?`
)
.all(sessionRow.rowID, firstNewTxIdx) as TransactionRow[];
@@ -217,7 +243,9 @@ export class SQLiteStorage {
? Object.values(newContent.new).flatMap((sessionEntry) =>
sessionEntry.newTransactions.flatMap((tx) => {
if (tx.privacy !== "trusting") return [];
return tx.changes
// TODO: avoid parsing here?
return cojsonInternals
.parseJSON(tx.changes)
.map(
(change) =>
change &&
@@ -338,7 +366,7 @@ export class SQLiteStorage {
lastSignature: msg.new[sessionID]!.lastSignature,
};
const upsertedSession = (this.db
const upsertedSession = this.db
.prepare<[number, string, number, string]>(
`INSERT INTO sessions (coValue, sessionID, lastIdx, lastSignature) VALUES (?, ?, ?, ?)
ON CONFLICT(coValue, sessionID) DO UPDATE SET lastIdx=excluded.lastIdx, lastSignature=excluded.lastSignature
@@ -349,21 +377,21 @@ export class SQLiteStorage {
sessionUpdate.sessionID,
sessionUpdate.lastIdx,
sessionUpdate.lastSignature
) as {rowID: number});
) as { rowID: number };
const sessionRowID = upsertedSession.rowID;
for (const newTransaction of actuallyNewTransactions) {
nextIdx++;
this.db
.prepare<[number, number, string]>(
`INSERT INTO transactions (ses, idx, tx) VALUES (?, ?, ?)`
.prepare<[number, number, string]>(
`INSERT INTO transactions (ses, idx, tx) VALUES (?, ?, ?)`
)
.run(
sessionRowID,
nextIdx,
JSON.stringify(newTransaction)
);
);
nextIdx++;
}
}
}

View File

@@ -5,7 +5,7 @@
"types": "dist/index.d.ts",
"type": "module",
"license": "MIT",
"version": "0.1.11",
"version": "0.2.2",
"devDependencies": {
"@types/jest": "^29.5.3",
"@typescript-eslint/eslint-plugin": "^6.2.1",
@@ -19,9 +19,8 @@
"dependencies": {
"@noble/ciphers": "^0.1.3",
"@noble/curves": "^1.1.0",
"@noble/hashes": "^1.3.1",
"@scure/base": "^1.1.1",
"fast-json-stable-stringify": "https://github.com/tirithen/fast-json-stable-stringify#7a3dcf2",
"hash-wasm": "^4.9.0",
"isomorphic-streams": "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
},
"scripts": {

View File

@@ -1,7 +1,12 @@
import { newRandomSessionID } from "./coValueCore.js";
import { cojsonReady } from "./index.js";
import { LocalNode } from "./node.js";
import { connectedPeers } from "./streamUtils.js";
beforeEach(async () => {
await cojsonReady;
});
test("Can create a node while creating a new account with profile", async () => {
const { node, accountID, accountSecret, sessionID } =
LocalNode.withNewlyCreatedAccount("Hermes Puggington");

View File

@@ -25,6 +25,7 @@ export function base64URLtoBytes(base64: string) {
}
export function bytesToBase64url(bytes: Uint8Array) {
// const before = performance.now();
const m = bytes.length;
const k = m % 3;
const n = Math.floor(m / 3) * 4 + (k && k + 1);
@@ -43,16 +44,25 @@ export function bytesToBase64url(bytes: Uint8Array) {
let base64 = decoder.decode(new Uint8Array(encoded.buffer, 0, n));
if (k === 1) base64 += "==";
if (k === 2) base64 += "=";
// const after = performance.now();
// console.log(
// "bytesToBase64url bandwidth in MB/s for length",
// (1000 * bytes.length / (after - before)) / (1024 * 1024),
// bytes.length
// );
return base64;
}
const alphabet =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
const lookup = Object.fromEntries(
Array.from(alphabet).map((a, i) => [a.charCodeAt(0), i])
);
const lookup = new Uint8Array(128);
for (const [i, a] of Array.from(alphabet).entries()) {
lookup[a.charCodeAt(0)] = i;
}
lookup["=".charCodeAt(0)] = 0;
const encodeLookup = Object.fromEntries(
Array.from(alphabet).map((a, i) => [i, a.charCodeAt(0)])
);
const encodeLookup = new Uint8Array(64);
for (const [i, a] of Array.from(alphabet).entries()) {
encodeLookup[i] = a.charCodeAt(0);
}

View File

@@ -1,9 +1,14 @@
import { accountOrAgentIDfromSessionID } from "./coValueCore.js";
import { BinaryCoStream } from "./coValues/coStream.js";
import { createdNowUnique } from "./crypto.js";
import { MAX_RECOMMENDED_TX_SIZE, cojsonReady } from "./index.js";
import { LocalNode } from "./node.js";
import { randomAnonymousAccountAndSessionID } from "./testUtils.js";
beforeEach(async () => {
await cojsonReady;
});
test("Empty CoMap works", () => {
const node = new LocalNode(...randomAnonymousAccountAndSessionID());
@@ -377,11 +382,143 @@ test("Can push into BinaryCoStream", () => {
content.edit((editable) => {
editable.startBinaryStream({mimeType: "text/plain", fileName: "test.txt"}, "trusting");
expect(editable.getBinaryChunks()).toEqual({
expect(editable.getBinaryChunks(true)).toEqual({
mimeType: "text/plain",
fileName: "test.txt",
chunks: [],
finished: false,
});
editable.pushBinaryStreamChunk(new Uint8Array([1, 2, 3]), "trusting");
expect(editable.getBinaryChunks(true)).toEqual({
mimeType: "text/plain",
fileName: "test.txt",
chunks: [new Uint8Array([1, 2, 3])],
finished: false,
});
editable.pushBinaryStreamChunk(new Uint8Array([4, 5, 6]), "trusting");
expect(editable.getBinaryChunks(true)).toEqual({
mimeType: "text/plain",
fileName: "test.txt",
chunks: [new Uint8Array([1, 2, 3]), new Uint8Array([4, 5, 6])],
finished: false,
});
editable.endBinaryStream("trusting");
expect(editable.getBinaryChunks()).toEqual({
mimeType: "text/plain",
fileName: "test.txt",
chunks: [new Uint8Array([1, 2, 3]), new Uint8Array([4, 5, 6])],
finished: true,
});
});
});
test("When adding large transactions (small fraction of MAX_RECOMMENDED_TX_SIZE), we store an inbetween signature every time we reach MAX_RECOMMENDED_TX_SIZE and split up newContentSince accordingly", () => {
const node = new LocalNode(...randomAnonymousAccountAndSessionID());
const coValue = node.createCoValue({
type: "costream",
ruleset: { type: "unsafeAllowAll" },
meta: { type: "binary" },
...createdNowUnique(),
});
const content = coValue.getCurrentContent();
if (content.type !== "costream" || content.meta?.type !== "binary" || !(content instanceof BinaryCoStream)) {
throw new Error("Expected binary stream");
}
content.edit((editable) => {
editable.startBinaryStream({mimeType: "text/plain", fileName: "test.txt"}, "trusting");
});
for (let i = 0; i < 10; i++) {
const chunk = new Uint8Array(MAX_RECOMMENDED_TX_SIZE/3 + 100);
content.edit((editable) => {
editable.pushBinaryStreamChunk(chunk, "trusting");
});
}
content.edit((editable) => {
editable.endBinaryStream("trusting");
});
const sessionEntry = coValue._sessions[node.currentSessionID]!;
expect(sessionEntry.transactions.length).toEqual(12);
expect(sessionEntry.signatureAfter[0]).not.toBeDefined();
expect(sessionEntry.signatureAfter[1]).not.toBeDefined();
expect(sessionEntry.signatureAfter[2]).not.toBeDefined();
expect(sessionEntry.signatureAfter[3]).toBeDefined();
expect(sessionEntry.signatureAfter[4]).not.toBeDefined();
expect(sessionEntry.signatureAfter[5]).not.toBeDefined();
expect(sessionEntry.signatureAfter[6]).toBeDefined();
expect(sessionEntry.signatureAfter[7]).not.toBeDefined();
expect(sessionEntry.signatureAfter[8]).not.toBeDefined();
expect(sessionEntry.signatureAfter[9]).toBeDefined();
expect(sessionEntry.signatureAfter[10]).not.toBeDefined();
expect(sessionEntry.signatureAfter[11]).not.toBeDefined();
const newContent = coValue.newContentSince({id: coValue.id, header: false, sessions: {}})!;
expect(newContent.length).toEqual(5)
expect(newContent[0]!.header).toBeDefined();
expect(newContent[1]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.signatureAfter[3]);
expect(newContent[2]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.signatureAfter[6]);
expect(newContent[3]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.signatureAfter[9]);
expect(newContent[4]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.lastSignature);
});
test("When adding large transactions (bigger than MAX_RECOMMENDED_TX_SIZE), we store an inbetween signature after every large transaction and split up newContentSince accordingly", () => {
const node = new LocalNode(...randomAnonymousAccountAndSessionID());
const coValue = node.createCoValue({
type: "costream",
ruleset: { type: "unsafeAllowAll" },
meta: { type: "binary" },
...createdNowUnique(),
});
const content = coValue.getCurrentContent();
if (content.type !== "costream" || content.meta?.type !== "binary" || !(content instanceof BinaryCoStream)) {
throw new Error("Expected binary stream");
}
content.edit((editable) => {
editable.startBinaryStream({mimeType: "text/plain", fileName: "test.txt"}, "trusting");
});
const chunk = new Uint8Array(MAX_RECOMMENDED_TX_SIZE + 100);
for (let i = 0; i < 3; i++) {
content.edit((editable) => {
editable.pushBinaryStreamChunk(chunk, "trusting");
});
}
content.edit((editable) => {
editable.endBinaryStream("trusting");
});
const sessionEntry = coValue._sessions[node.currentSessionID]!;
expect(sessionEntry.transactions.length).toEqual(5);
expect(sessionEntry.signatureAfter[0]).not.toBeDefined();
expect(sessionEntry.signatureAfter[1]).toBeDefined();
expect(sessionEntry.signatureAfter[2]).toBeDefined();
expect(sessionEntry.signatureAfter[3]).toBeDefined();
expect(sessionEntry.signatureAfter[4]).not.toBeDefined();
const newContent = coValue.newContentSince({id: coValue.id, header: false, sessions: {}})!;
expect(newContent.length).toEqual(5)
expect(newContent[0]!.header).toBeDefined();
expect(newContent[1]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.signatureAfter[1]);
expect(newContent[2]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.signatureAfter[2]);
expect(newContent[3]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.signatureAfter[3]);
expect(newContent[4]!.new[node.currentSessionID]!.lastSignature).toEqual(sessionEntry.lastSignature);
});

View File

@@ -47,7 +47,7 @@ export interface CoValue {
export interface WriteableCoValue extends CoValue {}
export type CoValueImpl =
| CoMap<{ [key: string]: JsonValue }, JsonObject | null>
| CoMap<{ [key: string]: JsonValue | undefined; }, JsonObject | null>
| CoList<JsonValue, JsonObject | null>
| CoStream<JsonValue, JsonObject | null>
| BinaryCoStream<BinaryCoStreamMeta>

View File

@@ -4,6 +4,12 @@ import { createdNowUnique, getAgentSignerSecret, newRandomAgentSecret, sign } fr
import { randomAnonymousAccountAndSessionID } from "./testUtils.js";
import { MapOpPayload } from "./coValues/coMap.js";
import { Role } from "./permissions.js";
import { cojsonReady } from "./index.js";
import { stableStringify } from "./jsonStringify.js";
beforeEach(async () => {
await cojsonReady;
});
test("Can create coValue with new agent credentials and add transaction to it", () => {
const [account, sessionID] = randomAnonymousAccountAndSessionID();
@@ -19,11 +25,11 @@ test("Can create coValue with new agent credentials and add transaction to it",
const transaction: Transaction = {
privacy: "trusting",
madeAt: Date.now(),
changes: [
changes: stableStringify([
{
hello: "world",
},
],
]),
};
const { expectedNewHash } = coValue.expectedNewHashAfter(
@@ -56,11 +62,11 @@ test("transactions with wrong signature are rejected", () => {
const transaction: Transaction = {
privacy: "trusting",
madeAt: Date.now(),
changes: [
changes: stableStringify([
{
hello: "world",
},
],
]),
};
const { expectedNewHash } = coValue.expectedNewHashAfter(
@@ -92,11 +98,11 @@ test("transactions with correctly signed, but wrong hash are rejected", () => {
const transaction: Transaction = {
privacy: "trusting",
madeAt: Date.now(),
changes: [
changes: stableStringify([
{
hello: "world",
},
],
]),
};
const { expectedNewHash } = coValue.expectedNewHashAfter(
@@ -105,11 +111,11 @@ test("transactions with correctly signed, but wrong hash are rejected", () => {
{
privacy: "trusting",
madeAt: Date.now(),
changes: [
changes: stableStringify([
{
hello: "wrong",
},
],
]),
},
]
);
@@ -149,13 +155,13 @@ test("New transactions in a group correctly update owned values, including subsc
const resignationThatWeJustLearnedAbout = {
privacy: "trusting",
madeAt: timeBeforeEdit,
changes: [
changes: stableStringify([
{
op: "set",
key: account.id,
value: "revoked"
} satisfies MapOpPayload<typeof account.id, Role>
]
])
} satisfies Transaction;
const { expectedNewHash } = group.underlyingMap.core.expectedNewHashAfter(sessionID, [

View File

@@ -14,11 +14,11 @@ import {
sign,
verify,
encryptForTransaction,
decryptForTransaction,
KeyID,
decryptKeySecret,
getAgentSignerID,
getAgentSealerID,
decryptRawForTransaction,
} from "./crypto.js";
import { JsonObject, JsonValue } from "./jsonValue.js";
import { base58 } from "@scure/base";
@@ -32,10 +32,10 @@ import { LocalNode } from "./node.js";
import { CoValueKnownState, NewContentMessage } from "./sync.js";
import { AgentID, RawCoID, SessionID, TransactionID } from "./ids.js";
import { CoList } from "./coValues/coList.js";
import {
AccountID,
GeneralizedControlledAccount,
} from "./account.js";
import { AccountID, GeneralizedControlledAccount } from "./account.js";
import { Stringified, stableStringify } from "./jsonStringify.js";
export const MAX_RECOMMENDED_TX_SIZE = 100 * 1024;
export type CoValueHeader = {
type: CoValueImpl["type"];
@@ -64,6 +64,7 @@ type SessionLog = {
transactions: Transaction[];
lastHash?: Hash;
streamingHash: StreamingHash;
signatureAfter: { [txIdx: number]: Signature | undefined };
lastSignature: Signature;
};
@@ -80,14 +81,14 @@ export type PrivateTransaction = {
export type TrustingTransaction = {
privacy: "trusting";
madeAt: number;
changes: JsonValue[];
changes: Stringified<JsonValue[]>;
};
export type Transaction = PrivateTransaction | TrustingTransaction;
export type DecryptedTransaction = {
txID: TransactionID;
changes: JsonValue[];
changes: Stringified<JsonValue[]>;
madeAt: number;
};
@@ -100,7 +101,11 @@ export class CoValueCore {
_sessions: { [key: SessionID]: SessionLog };
_cachedContent?: CoValueImpl;
listeners: Set<(content?: CoValueImpl) => void> = new Set();
_decryptionCache: {[key: Encrypted<JsonValue[], JsonValue>]: JsonValue[] | undefined} = {}
_decryptionCache: {
[key: Encrypted<JsonValue[], JsonValue>]:
| Stringified<JsonValue[]>
| undefined;
} = {};
constructor(
header: CoValueHeader,
@@ -187,10 +192,16 @@ export class CoValueCore {
return false;
}
// const beforeHash = performance.now();
const { expectedNewHash, newStreamingHash } = this.expectedNewHashAfter(
sessionID,
newTransactions
);
// const afterHash = performance.now();
// console.log(
// "Hashing took",
// afterHash - beforeHash
// );
if (givenExpectedNewHash && givenExpectedNewHash !== expectedNewHash) {
console.warn("Invalid hash", {
@@ -200,25 +211,159 @@ export class CoValueCore {
return false;
}
// const beforeVerify = performance.now();
if (!verify(newSignature, expectedNewHash, signerID)) {
console.warn(
"Invalid signature",
"Invalid signature in",
this.id,
newSignature,
expectedNewHash,
signerID
);
return false;
}
// const afterVerify = performance.now();
// console.log(
// "Verify took",
// afterVerify - beforeVerify
// );
this.doAddTransactions(
sessionID,
newTransactions,
newSignature,
expectedNewHash,
newStreamingHash
);
return true;
}
async tryAddTransactionsAsync(
sessionID: SessionID,
newTransactions: Transaction[],
givenExpectedNewHash: Hash | undefined,
newSignature: Signature
): Promise<boolean> {
const signerID = getAgentSignerID(
this.node.resolveAccountAgent(
accountOrAgentIDfromSessionID(sessionID),
"Expected to know signer of transaction"
)
);
if (!signerID) {
console.warn(
"Unknown agent",
accountOrAgentIDfromSessionID(sessionID)
);
return false;
}
const nTxBefore = this.sessions[sessionID]?.transactions.length ?? 0;
// const beforeHash = performance.now();
const { expectedNewHash, newStreamingHash } =
await this.expectedNewHashAfterAsync(sessionID, newTransactions);
// const afterHash = performance.now();
// console.log(
// "Hashing took",
// afterHash - beforeHash
// );
const nTxAfter = this.sessions[sessionID]?.transactions.length ?? 0;
if (nTxAfter !== nTxBefore) {
const newTransactionLengthBefore = newTransactions.length;
newTransactions = newTransactions.slice(nTxAfter - nTxBefore);
console.warn("Transactions changed while async hashing", {
nTxBefore,
nTxAfter,
newTransactionLengthBefore,
remainingNewTransactions: newTransactions.length,
});
}
if (givenExpectedNewHash && givenExpectedNewHash !== expectedNewHash) {
console.warn("Invalid hash", {
expectedNewHash,
givenExpectedNewHash,
});
return false;
}
// const beforeVerify = performance.now();
if (!verify(newSignature, expectedNewHash, signerID)) {
console.warn(
"Invalid signature in",
this.id,
newSignature,
expectedNewHash,
signerID
);
return false;
}
// const afterVerify = performance.now();
// console.log(
// "Verify took",
// afterVerify - beforeVerify
// );
this.doAddTransactions(
sessionID,
newTransactions,
newSignature,
expectedNewHash,
newStreamingHash
);
return true;
}
private doAddTransactions(
sessionID: SessionID,
newTransactions: Transaction[],
newSignature: Signature,
expectedNewHash: Hash,
newStreamingHash: StreamingHash
) {
const transactions = this.sessions[sessionID]?.transactions ?? [];
transactions.push(...newTransactions);
const signatureAfter = this.sessions[sessionID]?.signatureAfter ?? {};
const lastInbetweenSignatureIdx = Object.keys(signatureAfter).reduce(
(max, idx) => (parseInt(idx) > max ? parseInt(idx) : max),
-1
);
const sizeOfTxsSinceLastInbetweenSignature = transactions
.slice(lastInbetweenSignatureIdx + 1)
.reduce(
(sum, tx) =>
sum +
(tx.privacy === "private"
? tx.encryptedChanges.length
: tx.changes.length),
0
);
if (sizeOfTxsSinceLastInbetweenSignature > 100 * 1024) {
// console.log(
// "Saving inbetween signature for tx ",
// sessionID,
// transactions.length - 1,
// sizeOfTxsSinceLastInbetweenSignature
// );
signatureAfter[transactions.length - 1] = newSignature;
}
this._sessions[sessionID] = {
transactions,
lastHash: expectedNewHash,
streamingHash: newStreamingHash,
lastSignature: newSignature,
signatureAfter: signatureAfter,
};
this._cachedContent = undefined;
@@ -229,8 +374,6 @@ export class CoValueCore {
listener(content);
}
}
return true;
}
subscribe(listener: (content?: CoValueImpl) => void): () => void {
@@ -261,6 +404,32 @@ export class CoValueCore {
};
}
async expectedNewHashAfterAsync(
sessionID: SessionID,
newTransactions: Transaction[]
): Promise<{ expectedNewHash: Hash; newStreamingHash: StreamingHash }> {
const streamingHash =
this.sessions[sessionID]?.streamingHash.clone() ??
new StreamingHash();
let before = performance.now();
for (const transaction of newTransactions) {
streamingHash.update(transaction);
const after = performance.now();
if (after - before > 1) {
// console.log("Hashing blocked for", after - before);
await new Promise((resolve) => setTimeout(resolve, 0));
before = performance.now();
}
}
const newStreamingHash = streamingHash.clone();
return {
expectedNewHash: streamingHash.digest(),
newStreamingHash,
};
}
makeTransaction(
changes: JsonValue[],
privacy: "private" | "trusting"
@@ -278,20 +447,24 @@ export class CoValueCore {
);
}
const encrypted = encryptForTransaction(changes, keySecret, {
in: this.id,
tx: this.nextTransactionID(),
});
this._decryptionCache[encrypted] = stableStringify(changes);
transaction = {
privacy: "private",
madeAt,
keyUsed: keyID,
encryptedChanges: encryptForTransaction(changes, keySecret, {
in: this.id,
tx: this.nextTransactionID(),
}),
encryptedChanges: encrypted,
};
} else {
transaction = {
privacy: "trusting",
madeAt,
changes,
changes: stableStringify(changes),
};
}
@@ -361,10 +534,11 @@ export class CoValueCore {
if (!readKey) {
return undefined;
} else {
let decrytedChanges = this._decryptionCache[tx.encryptedChanges];
let decrytedChanges =
this._decryptionCache[tx.encryptedChanges];
if (!decrytedChanges) {
decrytedChanges = decryptForTransaction(
decrytedChanges = decryptRawForTransaction(
tx.encryptedChanges,
readKey,
{
@@ -372,7 +546,8 @@ export class CoValueCore {
tx: txID,
}
);
this._decryptionCache[tx.encryptedChanges] = decrytedChanges;
this._decryptionCache[tx.encryptedChanges] =
decrytedChanges;
}
if (!decrytedChanges) {
@@ -544,47 +719,95 @@ export class CoValueCore {
newContentSince(
knownState: CoValueKnownState | undefined
): NewContentMessage | undefined {
const newContent: NewContentMessage = {
): NewContentMessage[] | undefined {
let currentPiece: NewContentMessage = {
action: "content",
id: this.id,
header: knownState?.header ? undefined : this.header,
new: Object.fromEntries(
Object.entries(this.sessions)
.map(([sessionID, log]) => {
const newTransactions = log.transactions.slice(
knownState?.sessions[sessionID as SessionID] || 0
);
if (
newTransactions.length === 0 ||
!log.lastHash ||
!log.lastSignature
) {
return undefined;
}
return [
sessionID,
{
after:
knownState?.sessions[
sessionID as SessionID
] || 0,
newTransactions,
lastSignature: log.lastSignature,
},
];
})
.filter((x): x is Exclude<typeof x, undefined> => !!x)
),
new: {},
};
if (!newContent.header && Object.keys(newContent.new).length === 0) {
const pieces = [currentPiece];
const sentState: CoValueKnownState["sessions"] = {
...knownState?.sessions,
};
let newTxsWereAdded = true;
let pieceSize = 0;
while (newTxsWereAdded) {
newTxsWereAdded = false;
for (const [sessionID, log] of Object.entries(this.sessions) as [
SessionID,
SessionLog
][]) {
const nextKnownSignatureIdx = Object.keys(log.signatureAfter)
.map(Number)
.sort((a, b) => a - b)
.find((idx) => idx >= (sentState[sessionID] ?? -1));
const txsToAdd = log.transactions.slice(
sentState[sessionID] ?? 0,
nextKnownSignatureIdx === undefined
? undefined
: nextKnownSignatureIdx + 1
);
if (txsToAdd.length === 0) continue;
newTxsWereAdded = true;
const oldPieceSize = pieceSize;
pieceSize += txsToAdd.reduce(
(sum, tx) =>
sum +
(tx.privacy === "private"
? tx.encryptedChanges.length
: tx.changes.length),
0
);
if (pieceSize >= MAX_RECOMMENDED_TX_SIZE) {
currentPiece = {
action: "content",
id: this.id,
header: undefined,
new: {},
};
pieces.push(currentPiece);
pieceSize = pieceSize - oldPieceSize;
}
let sessionEntry = currentPiece.new[sessionID];
if (!sessionEntry) {
sessionEntry = {
after: sentState[sessionID] ?? 0,
newTransactions: [],
lastSignature: "WILL_BE_REPLACED" as Signature
};
currentPiece.new[sessionID] = sessionEntry;
}
sessionEntry.newTransactions.push(...txsToAdd);
sessionEntry.lastSignature = nextKnownSignatureIdx === undefined
? log.lastSignature!
: log.signatureAfter[nextKnownSignatureIdx]!
sentState[sessionID] =
(sentState[sessionID] || 0) + txsToAdd.length;
}
}
const piecesWithContent = pieces.filter(
(piece) => Object.keys(piece.new).length > 0 || piece.header
);
if (piecesWithContent.length === 0) {
return undefined;
}
return newContent;
return piecesWithContent;
}
getDependedOnCoValues(): RawCoID[] {

View File

@@ -4,6 +4,7 @@ import { CoValueCore, accountOrAgentIDfromSessionID } from "../coValueCore.js";
import { SessionID, TransactionID } from "../ids.js";
import { Group } from "../group.js";
import { AccountID, isAccountID } from "../account.js";
import { parseJSON } from "../jsonStringify.js";
type OpID = TransactionID & { changeIdx: number };
@@ -98,7 +99,7 @@ export class CoList<T extends JsonValue, Meta extends JsonObject | null = null>
changes,
madeAt,
} of this.core.getValidSortedTransactions()) {
for (const [changeIdx, changeUntyped] of changes.entries()) {
for (const [changeIdx, changeUntyped] of parseJSON(changes).entries()) {
const change = changeUntyped as ListOpPayload<T>;
if (change.op === "pre" || change.op === "app") {

View File

@@ -4,15 +4,16 @@ import { CoID, ReadableCoValue, WriteableCoValue } from '../coValue.js';
import { CoValueCore, accountOrAgentIDfromSessionID } from '../coValueCore.js';
import { AccountID, isAccountID } from '../account.js';
import { Group } from '../group.js';
import { parseJSON } from '../jsonStringify.js';
type MapOp<K extends string, V extends JsonValue> = {
type MapOp<K extends string, V extends JsonValue | undefined> = {
txID: TransactionID;
madeAt: number;
changeIdx: number;
} & MapOpPayload<K, V>;
// TODO: add after TransactionID[] for conflicts/ordering
export type MapOpPayload<K extends string, V extends JsonValue> = {
export type MapOpPayload<K extends string, V extends JsonValue | undefined> = {
op: "set";
key: K;
value: V;
@@ -22,18 +23,16 @@ export type MapOpPayload<K extends string, V extends JsonValue> = {
key: K;
};
export type MapK<M extends { [key: string]: JsonValue; }> = keyof M & string;
export type MapV<M extends { [key: string]: JsonValue; }> = M[MapK<M>];
export type MapM<M extends { [key: string]: JsonValue; }> = {
[KK in MapK<M>]: M[KK];
}
export type MapK<M extends { [key: string]: JsonValue | undefined; }> = keyof M & string;
export type MapV<M extends { [key: string]: JsonValue | undefined; }> = M[MapK<M>];
/** A collaborative map with precise shape `M` and optional static metadata `Meta` */
export class CoMap<
M extends { [key: string]: JsonValue; },
M extends { [key: string]: JsonValue | undefined; },
Meta extends JsonObject | null = null,
> implements ReadableCoValue {
id: CoID<CoMap<MapM<M>, Meta>>;
id: CoID<CoMap<M, Meta>>;
type = "comap" as const;
core: CoValueCore;
/** @internal */
@@ -43,7 +42,7 @@ export class CoMap<
/** @internal */
constructor(core: CoValueCore) {
this.id = core.id as CoID<CoMap<MapM<M>, Meta>>;
this.id = core.id as CoID<CoMap<M, Meta>>;
this.core = core;
this.ops = {};
@@ -64,7 +63,7 @@ export class CoMap<
for (const { txID, changes, madeAt } of this.core.getValidSortedTransactions()) {
for (const [changeIdx, changeUntyped] of (
changes
parseJSON(changes)
).entries()) {
const change = changeUntyped as MapOpPayload<MapK<M>, MapV<M>>;
let entries = this.ops[change.key];
@@ -207,7 +206,7 @@ export class CoMap<
}
export class WriteableCoMap<
M extends { [key: string]: JsonValue; },
M extends { [key: string]: JsonValue | undefined; },
Meta extends JsonObject | null = null,
> extends CoMap<M, Meta> implements WriteableCoValue {
/** @internal */

View File

@@ -1,9 +1,12 @@
import { JsonObject, JsonValue } from "../jsonValue.js";
import { CoID, ReadableCoValue, WriteableCoValue } from "../coValue.js";
import { CoValueCore } from "../coValueCore.js";
import { CoValueCore, accountOrAgentIDfromSessionID } from "../coValueCore.js";
import { Group } from "../group.js";
import { SessionID } from "../ids.js";
import { base64URLtoBytes, bytesToBase64url } from "../base64url.js";
import { AccountID } from "../index.js";
import { isAccountID } from "../account.js";
import { parseJSON } from "../jsonStringify.js";
export type BinaryChunkInfo = {
mimeType: string;
@@ -17,7 +20,7 @@ export type BinaryStreamStart = {
export type BinaryStreamChunk = {
type: "chunk";
chunk: `U${string}`;
chunk: `binary_U${string}`;
};
export type BinaryStreamEnd = {
@@ -40,7 +43,7 @@ export class CoStream<
type = "costream" as const;
core: CoValueCore;
items: {
[key: SessionID]: T[];
[key: SessionID]: {item: T, madeAt: number}[];
};
constructor(core: CoValueCore) {
@@ -64,16 +67,17 @@ export class CoStream<
for (const {
txID,
madeAt,
changes,
} of this.core.getValidSortedTransactions()) {
for (const changeUntyped of changes) {
for (const changeUntyped of parseJSON(changes)) {
const change = changeUntyped as T;
let entries = this.items[txID.sessionID];
if (!entries) {
entries = [];
this.items[txID.sessionID] = entries;
}
entries.push(change);
entries.push({item: change, madeAt});
}
}
}
@@ -87,13 +91,57 @@ export class CoStream<
);
}
return Object.values(this.items)[0];
return Object.values(this.items)[0]?.map(item => item.item);
}
// Returns, for each account that has written to this stream, that account's
// most recent item: the latest `madeAt` across all of the account's sessions.
// Sessions whose author is not an account (plain agent IDs) are skipped.
getLastItemsPerAccount(): {[account: AccountID]: T | undefined} {
// Track the winning {item, madeAt} per account so timestamps can be compared.
const result: {[account: AccountID]: {item: T, madeAt: number} | undefined} = {};
for (const [sessionID, items] of Object.entries(this.items)) {
const account = accountOrAgentIDfromSessionID(sessionID as SessionID);
// Only account authors are reported; non-account agents are ignored.
if (!isAccountID(account)) continue;
if (items.length > 0) {
// The last element of a session's list is taken as that session's
// newest item (items are appended in sorted-transaction order);
// compare it against the best candidate seen so far for the account.
const lastItemOfSession = items[items.length - 1]!;
if (!result[account] || lastItemOfSession.madeAt > result[account]!.madeAt) {
result[account] = lastItemOfSession;
}
}
}
// Strip the madeAt bookkeeping before returning.
return Object.fromEntries(Object.entries(result).map(([account, item]) =>
[account, item?.item]
));
}
/**
 * Returns the most recent item (highest `madeAt`) written by the given
 * account in any of its sessions, or undefined if that account has
 * written nothing to this stream.
 */
getLastItemFrom(account: AccountID): T | undefined {
    let newest: { item: T; madeAt: number } | undefined;
    for (const [sessionID, sessionItems] of Object.entries(this.items)) {
        // Sessions are matched to the account by session-ID prefix.
        if (!sessionID.startsWith(account)) continue;
        if (sessionItems.length === 0) continue;
        // Last element of the session list is that session's newest entry.
        const candidate = sessionItems[sessionItems.length - 1]!;
        if (newest === undefined || candidate.madeAt > newest.madeAt) {
            newest = candidate;
        }
    }
    return newest?.item;
}
/**
 * Convenience wrapper: the most recent item written by the local node's
 * own account, or undefined when the local agent is not an account.
 */
getLastItemFromMe(): T | undefined {
    const ownID = this.core.node.account.id;
    return isAccountID(ownID) ? this.getLastItemFrom(ownID) : undefined;
}
toJSON(): {
[key: SessionID]: T[];
} {
return this.items;
return Object.fromEntries(Object.entries(this.items).map(([sessionID, items]) =>
[sessionID, items.map(item => item.item)]
));
}
subscribe(listener: (coMap: CoStream<T, Meta>) => void): () => void {
@@ -111,6 +159,8 @@ export class CoStream<
}
}
const binary_U_prefixLength = 8; // "binary_U".length;
export class BinaryCoStream<
Meta extends BinaryCoStreamMeta = { type: "binary" }
>
@@ -119,9 +169,10 @@ export class BinaryCoStream<
{
id!: CoID<BinaryCoStream<Meta>>;
getBinaryChunks():
getBinaryChunks(allowUnfinished?: boolean):
| (BinaryChunkInfo & { chunks: Uint8Array[]; finished: boolean })
| undefined {
// const before = performance.now();
const items = this.getSingleStream();
if (!items) return;
@@ -133,17 +184,19 @@ export class BinaryCoStream<
return;
}
const end = items[items.length - 1];
if (end?.type !== "end" && !allowUnfinished) return;
const chunks: Uint8Array[] = [];
let finished = false;
// let totalLength = 0;
for (const item of items.slice(1)) {
if (item.type === "end") {
return {
mimeType: start.mimeType,
fileName: start.fileName,
totalSizeBytes: start.totalSizeBytes,
chunks,
finished: true,
};
finished = true;
break;
}
if (item.type !== "chunk") {
@@ -151,15 +204,25 @@ export class BinaryCoStream<
return undefined;
}
chunks.push(base64URLtoBytes(item.chunk.slice(1)));
const chunk = base64URLtoBytes(
item.chunk.slice(binary_U_prefixLength)
);
// totalLength += chunk.length;
chunks.push(chunk);
}
// const after = performance.now();
// console.log(
// "getBinaryChunks bandwidth in MB/s",
// (1000 * totalLength) / (after - before) / (1024 * 1024)
// );
return {
mimeType: start.mimeType,
fileName: start.fileName,
totalSizeBytes: start.totalSizeBytes,
chunks,
finished: false,
finished,
};
}
@@ -206,10 +269,7 @@ export class WriteableBinaryCoStream<
}
/** @internal */
push(
item: BinaryStreamItem,
privacy: "private" | "trusting" = "private"
) {
push(item: BinaryStreamItem, privacy: "private" | "trusting" = "private") {
WriteableCoStream.prototype.push.call(this, item, privacy);
}
@@ -230,13 +290,19 @@ export class WriteableBinaryCoStream<
chunk: Uint8Array,
privacy: "private" | "trusting" = "private"
) {
// const before = performance.now();
this.push(
{
type: "chunk",
chunk: `U${bytesToBase64url(chunk)}`,
chunk: `binary_U${bytesToBase64url(chunk)}`,
} satisfies BinaryStreamChunk,
privacy
);
// const after = performance.now();
// console.log(
// "pushBinaryStreamChunk bandwidth in MB/s",
// (1000 * chunk.length) / (after - before) / (1024 * 1024)
// );
}
endBinaryStream(privacy: "private" | "trusting" = "private") {

View File

@@ -21,6 +21,11 @@ import { xsalsa20_poly1305 } from "@noble/ciphers/salsa";
import { blake3 } from "@noble/hashes/blake3";
import stableStringify from "fast-json-stable-stringify";
import { SessionID } from './ids.js';
import { cojsonReady } from './index.js';
beforeEach(async () => {
await cojsonReady;
});
test("Signatures round-trip and use stable stringify", () => {
const data = { b: "world", a: "hello" };

View File

@@ -2,12 +2,39 @@ import { ed25519, x25519 } from "@noble/curves/ed25519";
import { xsalsa20_poly1305, xsalsa20 } from "@noble/ciphers/salsa";
import { JsonValue } from "./jsonValue.js";
import { base58 } from "@scure/base";
import stableStringify from "fast-json-stable-stringify";
import { blake3 } from "@noble/hashes/blake3";
import { randomBytes } from "@noble/ciphers/webcrypto/utils";
import { AgentID, RawCoID, TransactionID } from "./ids.js";
import { base64URLtoBytes, bytesToBase64url } from "./base64url.js";
import { createBLAKE3 } from 'hash-wasm';
import { Stringified, parseJSON, stableStringify } from "./jsonStringify.js";
let blake3Instance: Awaited<ReturnType<typeof createBLAKE3>>;
let blake3HashOnce: (data: Uint8Array) => Uint8Array;
let blake3HashOnceWithContext: (data: Uint8Array, {context}: {context: Uint8Array}) => Uint8Array;
let blake3incrementalUpdateSLOW_WITH_DEVTOOLS: (state: Uint8Array, data: Uint8Array) => Uint8Array;
let blake3digestForState: (state: Uint8Array) => Uint8Array;
// Resolves once the WASM BLAKE3 implementation (hash-wasm) has loaded and the
// module-level hashing helpers above have been bound to it. Callers must
// await this (re-exported from the package index as `cojsonReady`) before
// using any of the hashing functions.
export const cryptoReady = new Promise<void>((resolve) => {
createBLAKE3().then(bl3 => {
blake3Instance = bl3;
// One-shot hash of a byte buffer.
blake3HashOnce = (data) => {
return bl3.init().update(data).digest('binary');
}
// One-shot hash with a context prefix folded in first (used for
// domain separation when deriving seal/sign secrets from a seed).
blake3HashOnceWithContext = (data, {context}) => {
return bl3.init().update(context).update(data).digest('binary');
}
// Incremental update: load a serialized hasher state, fold in `data`,
// and return the new serialized state. The name flags that the
// save/load round-trip makes this a slow path.
blake3incrementalUpdateSLOW_WITH_DEVTOOLS = (state, data) => {
bl3.load(state).update(data);
return bl3.save();
}
// Finalize a serialized hasher state into a binary digest.
blake3digestForState = (state) => {
return bl3.load(state).digest('binary');
}
resolve();
})
});
export type SignerSecret = `signerSecret_z${string}`;
export type SignerID = `signer_z${string}`;
export type Signature = `signature_z${string}`;
@@ -128,7 +155,7 @@ export function seal<T extends JsonValue>(
to: SealerID,
nOnceMaterial: { in: RawCoID; tx: TransactionID }
): Sealed<T> {
const nOnce = blake3(
const nOnce = blake3HashOnce(
textEncoder.encode(stableStringify(nOnceMaterial))
).slice(0, 24);
@@ -153,7 +180,7 @@ export function unseal<T extends JsonValue>(
from: SealerID,
nOnceMaterial: { in: RawCoID; tx: TransactionID }
): T | undefined {
const nOnce = blake3(
const nOnce = blake3HashOnce(
textEncoder.encode(stableStringify(nOnceMaterial))
).slice(0, 24);
@@ -181,28 +208,32 @@ export type Hash = `hash_z${string}`;
export function secureHash(value: JsonValue): Hash {
return `hash_z${base58.encode(
blake3(textEncoder.encode(stableStringify(value)))
blake3HashOnce(textEncoder.encode(stableStringify(value)))
)}`;
}
export class StreamingHash {
state: ReturnType<typeof blake3.create>;
state: Uint8Array;
constructor(fromClone?: ReturnType<typeof blake3.create>) {
this.state = fromClone || blake3.create({});
constructor(fromClone?: Uint8Array) {
this.state = fromClone || blake3Instance.init().save();
}
update(value: JsonValue) {
this.state.update(textEncoder.encode(stableStringify(value)));
const encoded = textEncoder.encode(stableStringify(value))
// const before = performance.now();
this.state = blake3incrementalUpdateSLOW_WITH_DEVTOOLS(this.state, encoded);
// const after = performance.now();
// console.log(`Hashing throughput in MB/s`, 1000 * (encoded.length / (after - before)) / (1024 * 1024));
}
digest(): Hash {
const hash = this.state.digest();
const hash = blake3digestForState(this.state);
return `hash_z${base58.encode(hash)}`;
}
clone(): StreamingHash {
return new StreamingHash(this.state.clone());
return new StreamingHash(new Uint8Array(this.state));
}
}
@@ -211,7 +242,7 @@ export const shortHashLength = 19;
export function shortHash(value: JsonValue): ShortHash {
return `shortHash_z${base58.encode(
blake3(textEncoder.encode(stableStringify(value))).slice(
blake3HashOnce(textEncoder.encode(stableStringify(value))).slice(
0,
shortHashLength
)
@@ -241,7 +272,7 @@ function encrypt<T extends JsonValue, N extends JsonValue>(
const keySecretBytes = base58.decode(
keySecret.substring("keySecret_z".length)
);
const nOnce = blake3(
const nOnce = blake3HashOnce(
textEncoder.encode(stableStringify(nOnceMaterial))
).slice(0, 24);
@@ -285,15 +316,15 @@ export function encryptKeySecret(keys: {
};
}
function decrypt<T extends JsonValue, N extends JsonValue>(
function decryptRaw<T extends JsonValue, N extends JsonValue>(
encrypted: Encrypted<T, N>,
keySecret: KeySecret,
nOnceMaterial: N
): T | undefined {
): Stringified<T> {
const keySecretBytes = base58.decode(
keySecret.substring("keySecret_z".length)
);
const nOnce = blake3(
const nOnce = blake3HashOnce(
textEncoder.encode(stableStringify(nOnceMaterial))
).slice(0, 24);
@@ -302,13 +333,31 @@ function decrypt<T extends JsonValue, N extends JsonValue>(
);
const plaintext = xsalsa20(keySecretBytes, nOnce, ciphertext);
return textDecoder.decode(plaintext) as Stringified<T>;
}
function decrypt<T extends JsonValue, N extends JsonValue>(
encrypted: Encrypted<T, N>,
keySecret: KeySecret,
nOnceMaterial: N
): T | undefined {
try {
return JSON.parse(textDecoder.decode(plaintext));
return parseJSON(decryptRaw(encrypted, keySecret, nOnceMaterial));
} catch (e) {
console.error("Decryption error", e)
return undefined;
}
}
// Decrypts a transaction's changes but returns them as a raw JSON string
// (`Stringified<T>`) instead of parsing, so callers can cache the string or
// parse lazily. The nonce is derived from the coValue ID + transaction ID.
// NOTE(review): decryptRaw itself never returns undefined (unauthenticated
// stream decrypt); the `| undefined` keeps parity with decryptForTransaction.
export function decryptRawForTransaction<T extends JsonValue>(
encrypted: Encrypted<T, { in: RawCoID; tx: TransactionID }>,
keySecret: KeySecret,
nOnceMaterial: { in: RawCoID; tx: TransactionID }
): Stringified<T> | undefined {
return decryptRaw(encrypted, keySecret, nOnceMaterial);
}
export function decryptForTransaction<T extends JsonValue>(
encrypted: Encrypted<T, { in: RawCoID; tx: TransactionID }>,
keySecret: KeySecret,
@@ -365,11 +414,11 @@ export function agentSecretFromSecretSeed(secretSeed: Uint8Array): AgentSecret {
}
return `sealerSecret_z${base58.encode(
blake3(secretSeed, {
blake3HashOnceWithContext(secretSeed, {
context: textEncoder.encode("seal"),
})
)}/signerSecret_z${base58.encode(
blake3(secretSeed, {
blake3HashOnceWithContext(secretSeed, {
context: textEncoder.encode("sign"),
})
)}`;

View File

@@ -1,6 +1,10 @@
import { LocalNode, CoMap, CoList, CoStream, BinaryCoStream } from "./index";
import { LocalNode, CoMap, CoList, CoStream, BinaryCoStream, cojsonReady } from "./index";
import { randomAnonymousAccountAndSessionID } from "./testUtils";
beforeEach(async () => {
await cojsonReady;
});
test("Can create a CoMap in a group", () => {
const node = new LocalNode(...randomAnonymousAccountAndSessionID());

View File

@@ -238,7 +238,7 @@ export class Group {
/** Creates a new `CoMap` within this group, with the specified specialized
* `CoMap` type `M` and optional static metadata. */
createMap<M extends CoMap<{ [key: string]: JsonValue }, JsonObject | null>>(
createMap<M extends CoMap<{ [key: string]: JsonValue | undefined; }, JsonObject | null>>(
meta?: M["meta"]
): M {
return this.node

View File

@@ -1,4 +1,4 @@
import { CoValueCore, newRandomSessionID } from "./coValueCore.js";
import { CoValueCore, newRandomSessionID, MAX_RECOMMENDED_TX_SIZE } from "./coValueCore.js";
import { LocalNode } from "./node.js";
import type { CoValue, ReadableCoValue } from "./coValue.js";
import { CoMap, WriteableCoMap } from "./coValues/coMap.js";
@@ -18,12 +18,14 @@ import {
agentSecretFromSecretSeed,
secretSeedLength,
shortHashLength,
cryptoReady
} from "./crypto.js";
import { connectedPeers } from "./streamUtils.js";
import { AnonymousControlledAccount, ControlledAccount } from "./account.js";
import { rawCoIDtoBytes, rawCoIDfromBytes } from "./ids.js";
import { Group, expectGroupContent } from "./group.js";
import { base64URLtoBytes, bytesToBase64url } from "./base64url.js";
import { parseJSON } from "./jsonStringify.js";
import type { SessionID, AgentID } from "./ids.js";
import type { CoID, CoValueImpl } from "./coValue.js";
@@ -33,6 +35,7 @@ import type { SyncMessage, Peer } from "./sync.js";
import type { AgentSecret } from "./crypto.js";
import type { AccountID, Profile } from "./account.js";
import type { InviteSecret } from "./group.js";
import type * as Media from "./media.js";
type Value = JsonValue | CoValueImpl;
@@ -52,7 +55,8 @@ export const cojsonInternals = {
shortHashLength,
expectGroupContent,
base64URLtoBytes,
bytesToBase64url
bytesToBase64url,
parseJSON
};
export {
@@ -69,6 +73,8 @@ export {
CoValueCore,
AnonymousControlledAccount,
ControlledAccount,
cryptoReady as cojsonReady,
MAX_RECOMMENDED_TX_SIZE
};
export type {
@@ -88,6 +94,7 @@ export type {
AgentSecret,
InviteSecret,
SyncMessage,
Media
};
// eslint-disable-next-line @typescript-eslint/no-namespace

View File

@@ -0,0 +1,66 @@
// adapted from fast-json-stable-stringify (https://github.com/epoberezkin/fast-json-stable-stringify)
export type Stringified<T> = string & { __type: T };
/**
 * Deterministically JSON-serializes `data`: object keys are emitted in
 * sorted order, so structurally-equal values always produce byte-identical
 * strings (required for hashing and signatures). Returns undefined for
 * undefined input; `undefined` values inside objects are skipped, inside
 * arrays they become "null" (matching JSON.stringify). `toJSON` methods are
 * honored. Throws TypeError on circular structures.
 */
export function stableStringify<T>(data: T): Stringified<T>
export function stableStringify(data: undefined): undefined
export function stableStringify<T>(data: T | undefined): Stringified<T> | undefined {
    const cycles = false;
    // `seen` is shared across the whole traversal via the inner helper, so
    // revisiting an ancestor object is detected as a cycle. (Previously a
    // fresh `seen` was created on every recursive top-level call, which made
    // cycle detection a no-op and turned circular input into a stack
    // overflow instead of the intended TypeError.)
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const seen: any[] = [];

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    function stringify(input: any): string | undefined {
        let node = input;
        if (node && node.toJSON && typeof node.toJSON === "function") {
            node = node.toJSON();
        }
        if (node === undefined) return;
        if (typeof node == "number")
            return isFinite(node) ? "" + node : "null";
        if (typeof node !== "object") {
            // Fast path: encrypted/binary base64url payloads contain no
            // characters that need JSON escaping, so skip JSON.stringify.
            if (
                typeof node === "string" &&
                (node.startsWith("encrypted_U") || node.startsWith("binary_U"))
            ) {
                return `"${node}"`;
            }
            return JSON.stringify(node);
        }
        let i, out;
        if (Array.isArray(node)) {
            out = "[";
            for (i = 0; i < node.length; i++) {
                if (i) out += ",";
                // undefined elements serialize as "null", like JSON.stringify
                out += stringify(node[i]) || "null";
            }
            return out + "]";
        }
        if (node === null) return "null";
        if (seen.indexOf(node) !== -1) {
            if (cycles) return JSON.stringify("__cycle__");
            throw new TypeError("Converting circular structure to JSON");
        }
        const seenIndex = seen.push(node) - 1;
        // Sorted keys are what make the output stable.
        const keys = Object.keys(node).sort();
        out = "";
        for (i = 0; i < keys.length; i++) {
            const key = keys[i]!;
            const value = stringify(node[key]);
            // Keys whose value serializes to undefined are omitted entirely.
            if (!value) continue;
            if (out) out += ",";
            out += JSON.stringify(key) + ":" + value;
        }
        // Pop the node so shared (non-cyclic) subtrees are still allowed.
        seen.splice(seenIndex, 1);
        return "{" + out + "}";
    }

    return stringify(data) as Stringified<T> | undefined;
}
/**
 * Parses a string produced by `stableStringify`, recovering the branded
 * payload type `T`. Thin, type-safe wrapper over JSON.parse.
 */
export function parseJSON<T>(json: Stringified<T>): T {
    const parsed: T = JSON.parse(json);
    return parsed;
}

View File

@@ -3,4 +3,4 @@ import { RawCoID } from './ids.js';
export type JsonAtom = string | number | boolean | null;
export type JsonValue = JsonAtom | JsonArray | JsonObject | RawCoID;
export type JsonArray = JsonValue[];
export type JsonObject = { [key: string]: JsonValue; };
export type JsonObject = { [key: string]: JsonValue | undefined; };

View File

@@ -0,0 +1,9 @@
import { CoMap } from './coValues/coMap.js'
import { CoID } from './coValue.js'
import { BinaryCoStream } from './coValues/coStream.js'
export type ImageDefinition = CoMap<{
originalSize: [number, number];
placeholderDataURL?: string;
[res: `${number}x${number}`]: CoID<BinaryCoStream>;
}>;

View File

@@ -208,7 +208,7 @@ export class LocalNode {
reject(
new Error("Couldn't find invite before timeout")
),
1000
2000
);
});

View File

@@ -17,7 +17,11 @@ import {
groupWithTwoAdmins,
groupWithTwoAdminsHighLevel,
} from "./testUtils.js";
import { AnonymousControlledAccount } from "./index.js";
import { AnonymousControlledAccount, cojsonReady } from "./index.js";
beforeEach(async () => {
await cojsonReady;
});
test("Initial admin can add another admin to a group", () => {
groupWithTwoAdmins();

View File

@@ -15,6 +15,7 @@ import {
AccountID,
Profile,
} from "./account.js";
import { parseJSON } from "./jsonStringify.js";
export type PermissionsDef =
| { type: "group"; initialAdmin: AccountID | AgentID }
@@ -76,11 +77,13 @@ export function determineValidTransactions(
// console.log("before", { memberState, validTransactions });
const transactor = accountOrAgentIDfromSessionID(sessionID);
const change = tx.changes[0] as
const changes = parseJSON(tx.changes)
const change = changes[0] as
| MapOpPayload<AccountID | AgentID, Role>
| MapOpPayload<"readKey", JsonValue>
| MapOpPayload<"profile", CoID<Profile>>;
if (tx.changes.length !== 1) {
if (changes.length !== 1) {
console.warn("Group transaction must have exactly one change");
continue;
}

View File

@@ -34,7 +34,14 @@ export function connectedPeers(
trace &&
console.debug(
`${peer2id} -> ${peer1id}`,
JSON.stringify(chunk, null, 2)
JSON.stringify(
chunk,
(k, v) =>
(k === "changes" || k === "encryptedChanges")
? v.slice(0, 20) + "..."
: v,
2
)
);
controller.enqueue(chunk);
},
@@ -52,7 +59,14 @@ export function connectedPeers(
trace &&
console.debug(
`${peer1id} -> ${peer2id}`,
JSON.stringify(chunk, null, 2)
JSON.stringify(
chunk,
(k, v) =>
(k === "changes" || k === "encryptedChanges")
? v.slice(0, 20) + "..."
: v,
2
)
);
controller.enqueue(chunk);
},
@@ -102,16 +116,22 @@ export function newStreamPair<T>(): [ReadableStream<T>, WritableStream<T>] {
},
});
let lastWritePromise = Promise.resolve();
const writable = new WritableStream<T>({
async write(chunk) {
const enqueue = await enqueuePromise;
if (readerClosed) {
throw new Error("Reader closed");
} else {
// make sure write resolves before corresponding read
setTimeout(() => {
enqueue(chunk);
})
// make sure write resolves before corresponding read, but make sure writes are still in order
await lastWritePromise;
lastWritePromise = new Promise((resolve) => {
setTimeout(() => {
enqueue(chunk);
resolve();
});
});
}
},
async abort(reason) {

View File

@@ -1,23 +1,21 @@
import { newRandomSessionID } from "./coValueCore.js";
import { LocalNode } from "./node.js";
import { Peer, PeerID, SyncMessage } from "./sync.js";
import { SyncMessage } from "./sync.js";
import { expectMap } from "./coValue.js";
import { MapOpPayload } from "./coValues/coMap.js";
import { Group } from "./group.js";
import {
ReadableStream,
WritableStream,
TransformStream,
} from "isomorphic-streams";
import {
randomAnonymousAccountAndSessionID,
shouldNotResolve,
} from "./testUtils.js";
import {
connectedPeers,
newStreamPair
} from "./streamUtils.js";
import { connectedPeers, newStreamPair } from "./streamUtils.js";
import { AccountID } from "./account.js";
import { cojsonReady } from "./index.js";
import { stableStringify } from "./jsonStringify.js";
beforeEach(async () => {
await cojsonReady;
});
test("Node replies with initial tx and header to empty subscribe", async () => {
const [admin, session] = randomAnonymousAccountAndSessionID();
@@ -84,13 +82,13 @@ test("Node replies with initial tx and header to empty subscribe", async () => {
privacy: "trusting" as const,
madeAt: map.core.sessions[node.currentSessionID]!
.transactions[0]!.madeAt,
changes: [
changes: stableStringify([
{
op: "set",
key: "hello",
value: "world",
} satisfies MapOpPayload<string, string>,
],
]),
},
],
lastSignature:
@@ -162,13 +160,13 @@ test("Node replies with only new tx to subscribe with some known state", async (
privacy: "trusting" as const,
madeAt: map.core.sessions[node.currentSessionID]!
.transactions[1]!.madeAt,
changes: [
changes: stableStringify([
{
op: "set",
key: "goodbye",
value: "world",
} satisfies MapOpPayload<string, string>,
],
]),
},
],
lastSignature:
@@ -251,13 +249,13 @@ test("After subscribing, node sends own known state and new txs to peer", async
privacy: "trusting" as const,
madeAt: map.core.sessions[node.currentSessionID]!
.transactions[0]!.madeAt,
changes: [
changes: stableStringify([
{
op: "set",
key: "hello",
value: "world",
} satisfies MapOpPayload<string, string>,
],
]),
},
],
lastSignature:
@@ -283,13 +281,13 @@ test("After subscribing, node sends own known state and new txs to peer", async
privacy: "trusting" as const,
madeAt: map.core.sessions[node.currentSessionID]!
.transactions[1]!.madeAt,
changes: [
changes: stableStringify([
{
op: "set",
key: "goodbye",
value: "world",
} satisfies MapOpPayload<string, string>,
],
]),
},
],
lastSignature:
@@ -362,13 +360,13 @@ test("Client replies with known new content to tellKnownState from server", asyn
privacy: "trusting" as const,
madeAt: map.core.sessions[node.currentSessionID]!
.transactions[0]!.madeAt,
changes: [
changes: stableStringify([
{
op: "set",
key: "hello",
value: "world",
} satisfies MapOpPayload<string, string>,
],
]),
},
],
lastSignature:
@@ -438,8 +436,9 @@ test("No matter the optimistic known state, node respects invalid known state me
editable.set("goodbye", "world", "trusting");
});
const _mapEditMsg1 = await reader.read();
const _mapEditMsg2 = await reader.read();
const _mapEditMsgs = await reader.read();
console.log("Sending correction");
await writer.write({
action: "known",
@@ -465,13 +464,13 @@ test("No matter the optimistic known state, node respects invalid known state me
privacy: "trusting" as const,
madeAt: map.core.sessions[node.currentSessionID]!
.transactions[1]!.madeAt,
changes: [
changes: stableStringify([
{
op: "set",
key: "goodbye",
value: "world",
} satisfies MapOpPayload<string, string>,
],
]),
},
],
lastSignature:
@@ -568,13 +567,13 @@ test("If we add a server peer, all updates to all coValues are sent to it, even
privacy: "trusting" as const,
madeAt: map.core.sessions[node.currentSessionID]!
.transactions[0]!.madeAt,
changes: [
changes: stableStringify([
{
op: "set",
key: "hello",
value: "world",
} satisfies MapOpPayload<string, string>,
],
]),
},
],
lastSignature:

View File

@@ -9,6 +9,7 @@ import {
WritableStreamDefaultWriter,
} from "isomorphic-streams";
import { RawCoID, SessionID } from "./ids.js";
import { stableStringify } from "./jsonStringify.js";
export type CoValueKnownState = {
id: RawCoID;
@@ -214,14 +215,32 @@ export class SyncManager {
await this.sendNewContentIncludingDependencies(id, peer);
}
const newContent = coValue.newContentSince(
const newContentPieces = coValue.newContentSince(
peer.optimisticKnownStates[id]
);
if (newContent) {
await this.trySendToPeer(peer, newContent);
if (newContentPieces) {
const optimisticKnownStateBefore =
peer.optimisticKnownStates[id] || emptyKnownState(id);
const sendPieces = async () => {
for (const [i, piece] of newContentPieces.entries()) {
// console.log(
// `${id} -> ${peer.id}: Sending content piece ${i + 1}/${newContentPieces.length} header: ${!!piece.header}`,
// // Object.values(piece.new).map((s) => s.newTransactions)
// );
await this.trySendToPeer(peer, piece);
}
};
sendPieces().catch((e) => {
console.error("Error sending new content piece, retrying", e);
peer.optimisticKnownStates[id] = optimisticKnownStateBefore;
return this.sendNewContentIncludingDependencies(id, peer);
});
peer.optimisticKnownStates[id] = combinedKnownStates(
peer.optimisticKnownStates[id] || emptyKnownState(id),
optimisticKnownStateBefore,
coValue.knownState()
);
}
@@ -260,15 +279,21 @@ export class SyncManager {
for await (const msg of peerState.incoming) {
try {
await this.handleSyncMessage(msg, peerState);
await new Promise<void>((resolve) => {
setTimeout(resolve, 0);
});
} catch (e) {
console.error(
`Error reading from peer ${peer.id}, handling msg`,
JSON.stringify(msg),
JSON.stringify(msg, (k, v) =>
k === "changes" || k === "encryptedChanges"
? v.slice(0, 20) + "..."
: v
),
e
);
}
}
console.log("DONE!!!");
} catch (e) {
console.error(`Error reading from peer ${peer.id}`, e);
}
@@ -445,15 +470,43 @@ export class SyncManager {
const newTransactions =
newContentForSession.newTransactions.slice(alreadyKnownOffset);
const success = coValue.tryAddTransactions(
if (newTransactions.length === 0) {
continue;
}
const before = performance.now();
const success = await coValue.tryAddTransactionsAsync(
sessionID,
newTransactions,
undefined,
newContentForSession.lastSignature
);
const after = performance.now();
if (after - before > 10) {
const totalTxLength = newTransactions
.map((t) =>
t.privacy === "private"
? t.encryptedChanges.length
: t.changes.length
)
.reduce((a, b) => a + b, 0);
console.log(
`Adding incoming transactions took ${(
after - before
).toFixed(2)}ms for ${totalTxLength} bytes = bandwidth: ${(
(1000 * totalTxLength) /
(after - before) /
(1024 * 1024)
).toFixed(2)} MB/s`
);
}
if (!success) {
console.error("Failed to add transactions", newTransactions);
console.error(
"Failed to add transactions",
msg.id,
newTransactions
);
continue;
}
@@ -478,18 +531,9 @@ export class SyncManager {
}
async handleCorrection(msg: KnownStateMessage, peer: PeerState) {
const coValue = this.local.expectCoValueLoaded(msg.id);
peer.optimisticKnownStates[msg.id] = msg;
peer.optimisticKnownStates[msg.id] = combinedKnownStates(
msg,
coValue.knownState()
);
const newContent = coValue.newContentSince(msg);
if (newContent) {
await this.trySendToPeer(peer, newContent);
}
return this.sendNewContentIncludingDependencies(msg.id, peer);
}
handleUnsubscribe(_msg: DoneMessage) {

View File

@@ -1,11 +1,11 @@
{
"name": "jazz-browser-auth-local",
"version": "0.1.11",
"version": "0.2.3",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"jazz-browser": "^0.1.11",
"jazz-browser": "^0.2.3",
"typescript": "^5.1.6"
},
"scripts": {

View File

@@ -128,7 +128,7 @@ async function signUp(
},
user: {
id: webAuthNCredentialPayload,
name: username + `(${new Date().toLocaleString()})`,
name: username + ` (${new Date().toLocaleString()})`,
displayName: username,
},
pubKeyCredParams: [{ alg: -7, type: "public-key" }],

View File

@@ -0,0 +1,17 @@
module.exports = {
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
],
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
parserOptions: {
project: "./tsconfig.json",
},
root: true,
rules: {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", { "argsIgnorePattern": "^_", "varsIgnorePattern": "^_" }],
"@typescript-eslint/no-floating-promises": "error",
},
};

View File

@@ -0,0 +1,171 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
_.log
npm-debug.log_
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
# Runtime data
pids
_.pid
_.seed
\*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
\*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
\*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
\*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.\*
.DS_Store

View File

@@ -0,0 +1,2 @@
coverage
node_modules

View File

@@ -0,0 +1,21 @@
{
"name": "jazz-browser-media-images",
"version": "0.2.3",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"cojson": "^0.2.2",
"image-blob-reduce": "^4.1.0",
"jazz-browser": "^0.2.3",
"typescript": "^5.1.6"
},
"scripts": {
"lint": "eslint src/**/*.ts",
"build": "npm run lint && rm -rf ./dist && tsc --declaration --sourceMap --outDir dist",
"prepublishOnly": "npm run build"
},
"devDependencies": {
"@types/image-blob-reduce": "^4.1.1"
}
}

View File

@@ -0,0 +1,333 @@
import { CoID, Group, LocalNode, Media } from "cojson";
import ImageBlobReduce from "image-blob-reduce";
import Pica from "pica";
import {
createBinaryStreamFromBlob,
readBlobFromBinaryStream,
} from "jazz-browser";
const pica = new Pica();
export async function createImage(
image: Blob | File,
inGroup: Group
): Promise<Media.ImageDefinition> {
let originalWidth!: number;
let originalHeight!: number;
const Reducer = new ImageBlobReduce({ pica });
Reducer.after("_blob_to_image", (env) => {
originalWidth =
(env as unknown as { orientation: number }).orientation & 4
? env.image.height
: env.image.width;
originalHeight =
(env as unknown as { orientation: number }).orientation & 4
? env.image.width
: env.image.height;
return Promise.resolve(env);
});
const placeholderDataURL = (
await Reducer.toCanvas(image, { max: 8 })
).toDataURL("image/png");
let imageDefinition = inGroup.createMap<Media.ImageDefinition>();
imageDefinition = imageDefinition.edit((imageDefinition) => {
imageDefinition.set("originalSize", [originalWidth, originalHeight]);
imageDefinition.set("placeholderDataURL", placeholderDataURL);
});
setTimeout(async () => {
const max256 = await Reducer.toBlob(image, { max: 256 });
if (originalWidth > 256 || originalHeight > 256) {
const width =
originalWidth > originalHeight
? 256
: Math.round(256 * (originalWidth / originalHeight));
const height =
originalHeight > originalWidth
? 256
: Math.round(256 * (originalHeight / originalWidth));
const binaryStreamId = (
await createBinaryStreamFromBlob(max256, inGroup)
).id;
imageDefinition.edit((imageDefinition) => {
imageDefinition.set(`${width}x${height}`, binaryStreamId);
});
}
await new Promise((resolve) => setTimeout(resolve, 0));
const max1024 = await Reducer.toBlob(image, { max: 1024 });
if (originalWidth > 1024 || originalHeight > 1024) {
const width =
originalWidth > originalHeight
? 1024
: Math.round(1024 * (originalWidth / originalHeight));
const height =
originalHeight > originalWidth
? 1024
: Math.round(1024 * (originalHeight / originalWidth));
const binaryStreamId = (
await createBinaryStreamFromBlob(max1024, inGroup)
).id;
imageDefinition.edit((imageDefinition) => {
imageDefinition.set(`${width}x${height}`, binaryStreamId);
});
}
await new Promise((resolve) => setTimeout(resolve, 0));
const max2048 = await Reducer.toBlob(image, { max: 2048 });
if (originalWidth > 2048 || originalHeight > 2048) {
const width =
originalWidth > originalHeight
? 2048
: Math.round(2048 * (originalWidth / originalHeight));
const height =
originalHeight > originalWidth
? 2048
: Math.round(2048 * (originalHeight / originalWidth));
const binaryStreamId = (
await createBinaryStreamFromBlob(max2048, inGroup)
).id;
imageDefinition.edit((imageDefinition) => {
imageDefinition.set(`${width}x${height}`, binaryStreamId);
});
}
await new Promise((resolve) => setTimeout(resolve, 0));
const originalBinaryStreamId = (
await createBinaryStreamFromBlob(image, inGroup)
).id;
imageDefinition.edit((imageDefinition) => {
imageDefinition.set(
`${originalWidth}x${originalHeight}`,
originalBinaryStreamId
);
});
}, 0);
return imageDefinition;
}
export type LoadingImageInfo = {
originalSize?: [number, number];
placeholderDataURL?: string;
highestResSrc?: string;
};
export function loadImage(
imageID: CoID<Media.ImageDefinition>,
localNode: LocalNode,
progressiveCallback: (update: LoadingImageInfo) => void
): () => void {
let unsubscribe: (() => void) | undefined;
let stopped = false;
const resState: {
[res: `${number}x${number}`]:
| { state: "queued" }
| { state: "waiting" }
| { state: "loading"; doneOrFailed: Promise<void> }
| { state: "loaded"; blobURL: string }
| { state: "revoked" }
| { state: "failed" }
| undefined;
} = {};
const cleanUp = () => {
stopped = true;
for (const [res, entry] of Object.entries(resState)) {
if (entry?.state === "loaded") {
URL.revokeObjectURL(entry.blobURL);
resState[res as `${number}x${number}`] = { state: "revoked" };
}
}
unsubscribe?.();
};
localNode
.load(imageID)
.then((imageDefinition) => {
if (stopped) return;
unsubscribe = imageDefinition.subscribe(async (imageDefinition) => {
if (stopped) return;
const originalSize = imageDefinition.get("originalSize");
const placeholderDataURL =
imageDefinition.get("placeholderDataURL");
const resolutions = imageDefinition
.keys()
.filter(
(key): key is `${number}x${number}` =>
!!key.match(/\d+x\d+/)
)
.sort((a, b) => {
const widthA = Number(a.split("x")[0]);
const widthB = Number(b.split("x")[0]);
return widthA - widthB;
});
const startLoading = async () => {
const notYetQueuedOrLoading = resolutions.filter(
(res) => !resState[res]
);
// console.log(
// "Loading iteration",
// resolutions,
// resState,
// notYetQueuedOrLoading
// );
for (const res of notYetQueuedOrLoading) {
resState[res] = { state: "queued" };
}
for (const res of notYetQueuedOrLoading) {
if (stopped) return;
resState[res] = { state: "waiting" };
const binaryStreamId = imageDefinition.get(res)!;
// console.log(
// "Loading image res",
// imageID,
// res,
// binaryStreamId
// );
const binaryStream = await localNode.load(
binaryStreamId
);
if (stopped) return;
if (!binaryStream) {
resState[res] = { state: "failed" };
console.error(
"Loading image res failed",
imageID,
res,
binaryStreamId
);
return;
}
await new Promise<void>((resolveFullyLoaded) => {
const unsubFromStream = binaryStream.subscribe(
async (_) => {
if (stopped) return;
const currentState = resState[res];
if (currentState?.state === "loading") {
await currentState.doneOrFailed;
// console.log(
// "Retrying image res after previous attempt",
// imageID,
// res,
// binaryStreamId
// );
}
if (resState[res]?.state === "loaded") {
return;
}
const doneOrFailed = new Promise<void>(
// eslint-disable-next-line no-async-promise-executor
async (resolveDoneOrFailed) => {
const blob =
await readBlobFromBinaryStream(
binaryStreamId,
localNode
);
if (stopped) return;
if (!blob) {
// console.log(
// "Image res not available yet",
// imageID,
// res,
// binaryStreamId
// );
resolveDoneOrFailed();
return;
}
const blobURL =
URL.createObjectURL(blob);
resState[res] = {
state: "loaded",
blobURL,
};
// console.log(
// "Loaded image res",
// imageID,
// res,
// binaryStreamId
// );
progressiveCallback({
originalSize,
placeholderDataURL,
highestResSrc: blobURL,
});
unsubFromStream();
resolveDoneOrFailed();
await new Promise((resolve) =>
setTimeout(resolve, 0)
);
resolveFullyLoaded();
}
);
resState[res] = {
state: "loading",
doneOrFailed,
};
}
);
});
}
};
if (
!Object.values(resState).some(
(entry) => entry?.state === "loaded"
)
) {
progressiveCallback({
originalSize,
placeholderDataURL,
});
}
startLoading().catch((err) => {
console.error("Error loading image", imageID, err);
cleanUp();
});
});
})
.catch((err) => {
console.error("Error loading image", imageID, err);
cleanUp();
});
return cleanUp;
}

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"lib": ["ESNext", "DOM"],
"module": "esnext",
"target": "ES2020",
"moduleResolution": "bundler",
"moduleDetection": "force",
"strict": true,
"skipLibCheck": true,
"jsx": "react",
"forceConsistentCasingInFileNames": true,
"noUncheckedIndexedAccess": true,
"esModuleInterop": true,
},
"include": ["./src/**/*"],
}

View File

@@ -0,0 +1,75 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@noble/ciphers@^0.1.3":
version "0.1.4"
resolved "https://registry.yarnpkg.com/@noble/ciphers/-/ciphers-0.1.4.tgz#96327dca147829ed9eee0d96cfdf7c57915765f0"
integrity sha512-d3ZR8vGSpy3v/nllS+bD/OMN5UZqusWiQqkyj7AwzTnhXFH72pF5oB4Ach6DQ50g5kXxC28LdaYBEpsyv9KOUQ==
"@noble/curves@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.1.0.tgz#f13fc667c89184bc04cccb9b11e8e7bae27d8c3d"
integrity sha512-091oBExgENk/kGj3AZmtBDMpxQPDtxQABR2B9lb1JbVTs6ytdzZNwvhxQ4MWasRNEzlbEH8jCWFCwhF/Obj5AA==
dependencies:
"@noble/hashes" "1.3.1"
"@noble/hashes@1.3.1", "@noble/hashes@^1.3.1":
version "1.3.1"
resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.3.1.tgz#8831ef002114670c603c458ab8b11328406953a9"
integrity sha512-EbqwksQwz9xDRGfDST86whPBgM65E0OH/pCgqW0GBVzO22bNE+NuIbeTb714+IfSjU3aRk47EUvXIb5bTsenKA==
"@scure/base@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.1.tgz#ebb651ee52ff84f420097055f4bf46cfba403938"
integrity sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA==
"@types/prop-types@*":
version "15.7.5"
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==
"@types/react@^18.2.19":
version "18.2.19"
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.19.tgz#f77cb2c8307368e624d464a25b9675fa35f95a8b"
integrity sha512-e2S8wmY1ePfM517PqCG80CcE48Xs5k0pwJzuDZsfE8IZRRBfOMCF+XqnFxu6mWtyivum1MQm4aco+WIt6Coimw==
dependencies:
"@types/prop-types" "*"
"@types/scheduler" "*"
csstype "^3.0.2"
"@types/scheduler@*":
version "0.16.3"
resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.3.tgz#cef09e3ec9af1d63d2a6cc5b383a737e24e6dcf5"
integrity sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==
cojson@^0.0.14:
version "0.0.14"
resolved "https://registry.yarnpkg.com/cojson/-/cojson-0.0.14.tgz#e7b190ade1efc20d6f0fa12411d7208cdc0f19f7"
integrity sha512-TFenIGswEEhnZlCmq+B1NZPztjovZ72AjK1YkkZca54ZFbB1lAHdPt2hqqu/QBO24C9+6DtuoS2ixm6gbSBWCg==
dependencies:
"@noble/ciphers" "^0.1.3"
"@noble/curves" "^1.1.0"
"@noble/hashes" "^1.3.1"
"@scure/base" "^1.1.1"
fast-json-stable-stringify "^2.1.0"
isomorphic-streams "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
csstype@^3.0.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.2.tgz#1d4bf9d572f11c14031f0436e1c10bc1f571f50b"
integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==
fast-json-stable-stringify@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
"isomorphic-streams@git+https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae":
version "1.0.3"
resolved "git+https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
typescript@^5.1.6:
version "5.1.6"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.1.6.tgz#02f8ac202b6dad2c0dd5e0913745b47a37998274"
integrity sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==

View File

@@ -1,12 +1,12 @@
{
"name": "jazz-browser",
"version": "0.1.11",
"version": "0.2.3",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"cojson": "^0.1.11",
"jazz-storage-indexeddb": "^0.1.11",
"cojson": "^0.2.2",
"jazz-storage-indexeddb": "^0.2.3",
"typescript": "^5.1.6"
},
"scripts": {

View File

@@ -1,5 +1,7 @@
import { BinaryCoStream, InviteSecret } from "cojson";
import { BinaryCoStreamMeta } from "cojson";
import { MAX_RECOMMENDED_TX_SIZE } from "cojson";
import { cojsonReady } from "cojson";
import {
LocalNode,
cojsonInternals,
@@ -30,6 +32,7 @@ export async function createBrowserNode({
syncAddress?: string;
reconnectionTimeout?: number;
}): Promise<BrowserNodeHandle> {
await cojsonReady;
let sessionDone: () => void;
const firstWsPeer = createWebSocketPeer(syncAddress);
@@ -70,6 +73,10 @@ export async function createBrowserNode({
node,
done: () => {
shouldTryToReconnect = false;
console.log("Cleaning up node")
for (const peer of Object.values(node.sync.peers)) {
peer.outgoing.close().catch(e => console.error("Error while closing peer", e));
}
sessionDone?.();
},
};
@@ -91,9 +98,7 @@ export type SessionHandle = {
done: () => void;
};
function getSessionHandleFor(
accountID: AccountID | AgentID
): SessionHandle {
function getSessionHandleFor(accountID: AccountID | AgentID): SessionHandle {
let done!: () => void;
const donePromise = new Promise<void>((resolve) => {
done = resolve;
@@ -176,15 +181,25 @@ function websocketReadableStream<T>(ws: WebSocket) {
pingTimeout = setTimeout(() => {
console.debug("Ping timeout");
controller.close();
ws.close();
try {
controller.close();
ws.close();
} catch (e) {
console.error(
"Error while trying to close ws on ping timeout",
e
);
}
}, 2500);
return;
}
controller.enqueue(msg);
};
const closeListener = () => controller.close();
const closeListener = () => {
controller.close();
clearTimeout(pingTimeout);
};
ws.addEventListener("close", closeListener);
ws.addEventListener("error", () => {
controller.error(new Error("The WebSocket errored!"));
@@ -305,7 +320,9 @@ export function createInviteLink(
return `${baseURL}#invitedTo=${value.id}&${inviteSecret}`;
}
export function parseInviteLink<C extends CoValueImpl>(inviteURL: string):
export function parseInviteLink<C extends CoValueImpl>(
inviteURL: string
):
| {
valueID: CoID<C>;
inviteSecret: InviteSecret;
@@ -322,7 +339,9 @@ export function parseInviteLink<C extends CoValueImpl>(inviteURL: string):
return { valueID, inviteSecret };
}
export function consumeInviteLinkFromWindowLocation<C extends CoValueImpl>(node: LocalNode): Promise<
export function consumeInviteLinkFromWindowLocation<C extends CoValueImpl>(
node: LocalNode
): Promise<
| {
valueID: CoID<C>;
inviteSecret: string;
@@ -349,26 +368,37 @@ export function consumeInviteLinkFromWindowLocation<C extends CoValueImpl>(node:
});
}
export async function createBinaryStreamFromBlob<C extends BinaryCoStream<BinaryCoStreamMeta>>(blob: Blob | File, inGroup: Group, meta: C["meta"] = {type: "binary"}): Promise<C> {
export async function createBinaryStreamFromBlob<
C extends BinaryCoStream<BinaryCoStreamMeta>
>(
blob: Blob | File,
inGroup: Group,
meta: C["meta"] = { type: "binary" }
): Promise<C> {
let stream = inGroup.createBinaryStream(meta);
const reader = new FileReader();
const done = new Promise<void>((resolve) => {
reader.onload = () => {
reader.onload = async () => {
const data = new Uint8Array(reader.result as ArrayBuffer);
stream = stream.edit(stream => {
stream = stream.edit((stream) => {
stream.startBinaryStream({
mimeType: blob.type,
totalSizeBytes: blob.size,
fileName: blob instanceof File ? blob.name : undefined,
});
const chunkSize = 100 * 1024;
}) as C;// TODO: fix this
const chunkSize = MAX_RECOMMENDED_TX_SIZE;
for (let idx = 0; idx < data.length; idx += chunkSize) {
stream.pushBinaryStreamChunk(data.slice(idx, idx + chunkSize));
stream = stream.edit((stream) => {
stream.pushBinaryStreamChunk(
data.slice(idx, idx + chunkSize)
);
}) as C; // TODO: fix this
await new Promise((resolve) => setTimeout(resolve, 0));
}
stream = stream.edit((stream) => {
stream.endBinaryStream();
}) as C; // TODO: fix this
resolve();
@@ -381,22 +411,24 @@ export async function createBinaryStreamFromBlob<C extends BinaryCoStream<Binary
return stream;
}
export async function readBlobFromBinaryStream<C extends BinaryCoStream<BinaryCoStreamMeta>>(streamId: CoID<C>, node: LocalNode, allowUnfinished?: boolean): Promise<Blob | undefined> {
export async function readBlobFromBinaryStream<
C extends BinaryCoStream<BinaryCoStreamMeta>
>(
streamId: CoID<C>,
node: LocalNode,
allowUnfinished?: boolean
): Promise<Blob | undefined> {
const stream = await node.load<C>(streamId);
if (!stream) {
return undefined;
}
const chunks = stream.getBinaryChunks();
const chunks = stream.getBinaryChunks(allowUnfinished);
if (!chunks) {
return undefined;
}
if (!allowUnfinished && !chunks.finished) {
return undefined;
}
return new Blob(chunks.chunks, { type: chunks.mimeType });
}
}

View File

@@ -1,12 +1,12 @@
{
"name": "jazz-react-auth-local",
"version": "0.1.13",
"version": "0.2.3",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"jazz-browser-auth-local": "^0.1.11",
"jazz-react": "^0.1.13",
"jazz-browser-auth-local": "^0.2.3",
"jazz-react": "^0.2.3",
"typescript": "^5.1.6"
},
"devDependencies": {

View File

@@ -0,0 +1,17 @@
module.exports = {
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
],
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
parserOptions: {
project: "./tsconfig.json",
},
root: true,
rules: {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", { "argsIgnorePattern": "^_", "varsIgnorePattern": "^_" }],
"@typescript-eslint/no-floating-promises": "error",
},
};

View File

@@ -0,0 +1,171 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
_.log
npm-debug.log_
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
# Runtime data
pids
_.pid
_.seed
\*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
\*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
\*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
\*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.\*
.DS_Store

View File

@@ -0,0 +1,2 @@
coverage
node_modules

View File

@@ -0,0 +1,26 @@
{
"name": "jazz-react-media-images",
"version": "0.2.3",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"cojson": "^0.2.2",
"jazz-browser": "^0.2.3",
"jazz-browser-media-images": "^0.2.3",
"jazz-react": "^0.2.3",
"typescript": "^5.1.6"
},
"devDependencies": {
"@types/react": "^18.2.19"
},
"peerDependencies": {
"react": "17 - 18"
},
"scripts": {
"lint": "eslint src/**/*.tsx",
"build": "npm run lint && rm -rf ./dist && tsc --declaration --sourceMap --outDir dist",
"prepublishOnly": "npm run build"
},
"gitHead": "33c27053293b4801b968c61d5c4c989f93a67d13"
}

View File

@@ -0,0 +1,24 @@
import { CoID, Media } from "cojson";
import { loadImage, LoadingImageInfo } from "jazz-browser-media-images";
import { useJazz } from "jazz-react";
import { useEffect, useState } from "react";
export { createImage } from "jazz-browser-media-images";
export function useLoadImage(
imageID?: CoID<Media.ImageDefinition>
): LoadingImageInfo | undefined {
const { localNode } = useJazz();
const [imageInfo, setImageInfo] = useState<LoadingImageInfo>();
useEffect(() => {
if (!imageID) return;
const unsubscribe = loadImage(imageID, localNode, (imageInfo) => {
setImageInfo(imageInfo);
});
return unsubscribe;
}, [imageID, localNode]);
return imageInfo;
}

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"lib": ["ESNext", "DOM"],
"module": "esnext",
"target": "ES2020",
"moduleResolution": "bundler",
"moduleDetection": "force",
"strict": true,
"skipLibCheck": true,
"jsx": "react",
"forceConsistentCasingInFileNames": true,
"noUncheckedIndexedAccess": true,
"esModuleInterop": true,
},
"include": ["./src/**/*"],
}

View File

@@ -0,0 +1,75 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@noble/ciphers@^0.1.3":
version "0.1.4"
resolved "https://registry.yarnpkg.com/@noble/ciphers/-/ciphers-0.1.4.tgz#96327dca147829ed9eee0d96cfdf7c57915765f0"
integrity sha512-d3ZR8vGSpy3v/nllS+bD/OMN5UZqusWiQqkyj7AwzTnhXFH72pF5oB4Ach6DQ50g5kXxC28LdaYBEpsyv9KOUQ==
"@noble/curves@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.1.0.tgz#f13fc667c89184bc04cccb9b11e8e7bae27d8c3d"
integrity sha512-091oBExgENk/kGj3AZmtBDMpxQPDtxQABR2B9lb1JbVTs6ytdzZNwvhxQ4MWasRNEzlbEH8jCWFCwhF/Obj5AA==
dependencies:
"@noble/hashes" "1.3.1"
"@noble/hashes@1.3.1", "@noble/hashes@^1.3.1":
version "1.3.1"
resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.3.1.tgz#8831ef002114670c603c458ab8b11328406953a9"
integrity sha512-EbqwksQwz9xDRGfDST86whPBgM65E0OH/pCgqW0GBVzO22bNE+NuIbeTb714+IfSjU3aRk47EUvXIb5bTsenKA==
"@scure/base@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.1.tgz#ebb651ee52ff84f420097055f4bf46cfba403938"
integrity sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA==
"@types/prop-types@*":
version "15.7.5"
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==
"@types/react@^18.2.19":
version "18.2.19"
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.19.tgz#f77cb2c8307368e624d464a25b9675fa35f95a8b"
integrity sha512-e2S8wmY1ePfM517PqCG80CcE48Xs5k0pwJzuDZsfE8IZRRBfOMCF+XqnFxu6mWtyivum1MQm4aco+WIt6Coimw==
dependencies:
"@types/prop-types" "*"
"@types/scheduler" "*"
csstype "^3.0.2"
"@types/scheduler@*":
version "0.16.3"
resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.3.tgz#cef09e3ec9af1d63d2a6cc5b383a737e24e6dcf5"
integrity sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==
cojson@^0.0.14:
version "0.0.14"
resolved "https://registry.yarnpkg.com/cojson/-/cojson-0.0.14.tgz#e7b190ade1efc20d6f0fa12411d7208cdc0f19f7"
integrity sha512-TFenIGswEEhnZlCmq+B1NZPztjovZ72AjK1YkkZca54ZFbB1lAHdPt2hqqu/QBO24C9+6DtuoS2ixm6gbSBWCg==
dependencies:
"@noble/ciphers" "^0.1.3"
"@noble/curves" "^1.1.0"
"@noble/hashes" "^1.3.1"
"@scure/base" "^1.1.1"
fast-json-stable-stringify "^2.1.0"
isomorphic-streams "https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
csstype@^3.0.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.2.tgz#1d4bf9d572f11c14031f0436e1c10bc1f571f50b"
integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==
fast-json-stable-stringify@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
"isomorphic-streams@git+https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae":
version "1.0.3"
resolved "git+https://github.com/sgwilym/isomorphic-streams.git#aa9394781bfc92f8d7c981be7daf8af4b4cd4fae"
typescript@^5.1.6:
version "5.1.6"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.1.6.tgz#02f8ac202b6dad2c0dd5e0913745b47a37998274"
integrity sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==

View File

@@ -1,12 +1,12 @@
{
"name": "jazz-react",
"version": "0.1.13",
"version": "0.2.3",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"cojson": "^0.1.11",
"jazz-browser": "^0.1.11",
"cojson": "^0.2.2",
"jazz-browser": "^0.2.3",
"typescript": "^5.1.6"
},
"devDependencies": {

View File

@@ -8,14 +8,9 @@ import {
CojsonInternalTypes,
BinaryCoStream,
BinaryCoStreamMeta,
Group,
} from "cojson";
import React, { ChangeEvent, useEffect, useState } from "react";
import {
AuthProvider,
createBinaryStreamFromBlob,
createBrowserNode,
} from "jazz-browser";
import React, { useEffect, useState } from "react";
import { AuthProvider, createBrowserNode } from "jazz-browser";
import { readBlobFromBinaryStream } from "jazz-browser";
export {
@@ -52,6 +47,7 @@ export function WithJazz({
useEffect(() => {
let done: (() => void) | undefined = undefined;
let stop = false;
(async () => {
const nodeHandle = await createBrowserNode({
@@ -62,6 +58,11 @@ export function WithJazz({
undefined,
});
if (stop) {
nodeHandle.done();
return;
}
setNode(nodeHandle.node);
done = nodeHandle.done;
@@ -70,6 +71,7 @@ export function WithJazz({
});
return () => {
stop = true;
done && done();
};
}, [auth, syncAddress]);
@@ -183,23 +185,11 @@ export function useBinaryStream<C extends BinaryCoStream<BinaryCoStreamMeta>>(
.catch((e) => console.error("Failed to read binary stream", e));
}, [stream, localNode]);
useEffect(() => {
return () => {
blob && URL.revokeObjectURL(blob.blobURL);
};
}, [blob?.blobURL]);
return blob;
}
export function createBinaryStreamHandler<
C extends BinaryCoStream<BinaryCoStreamMeta>
>(
onCreated: (createdStream: C) => void,
inGroup: Group,
meta: C["meta"] = {type: "binary"}
): (event: ChangeEvent) => void {
return (event) => {
const file = (event.target as HTMLInputElement).files?.[0];
if (!file) return;
createBinaryStreamFromBlob(file, inGroup, meta)
.then(onCreated)
.catch((e) => console.error("Failed to create binary stream", e));
};
}

View File

@@ -1,11 +1,11 @@
{
"name": "jazz-storage-indexeddb",
"version": "0.1.11",
"version": "0.2.3",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"dependencies": {
"cojson": "^0.1.11",
"cojson": "^0.2.2",
"typescript": "^5.1.6"
},
"devDependencies": {

View File

@@ -1,4 +1,12 @@
import { cojsonInternals, SessionID, SyncMessage, Peer, CojsonInternalTypes } from "cojson";
import {
cojsonInternals,
SessionID,
SyncMessage,
Peer,
CojsonInternalTypes,
MAX_RECOMMENDED_TX_SIZE,
} from "cojson";
import { Signature } from "cojson/dist/crypto";
import {
ReadableStream,
WritableStream,
@@ -18,6 +26,7 @@ type SessionRow = {
sessionID: SessionID;
lastIdx: number;
lastSignature: CojsonInternalTypes.Signature;
bytesSinceLastSignature?: number;
};
type StoredSessionRow = SessionRow & { rowID: number };
@@ -28,6 +37,12 @@ type TransactionRow = {
tx: CojsonInternalTypes.Transaction;
};
type SignatureAfterRow = {
ses: number;
idx: number;
signature: CojsonInternalTypes.Signature;
};
export class IDBStorage {
db: IDBDatabase;
fromLocalNode!: ReadableStreamDefaultReader<SyncMessage>;
@@ -49,7 +64,7 @@ export class IDBStorage {
done = result.done;
if (result.value) {
this.handleSyncMessage(result.value);
await this.handleSyncMessage(result.value);
}
}
})();
@@ -82,42 +97,63 @@ export class IDBStorage {
toLocalNode: WritableStream<SyncMessage>
) {
const dbPromise = new Promise<IDBDatabase>((resolve, reject) => {
const request = indexedDB.open("jazz-storage", 1);
const request = indexedDB.open("jazz-storage", 4);
request.onerror = () => {
reject(request.error);
};
request.onsuccess = () => {
resolve(request.result);
};
request.onupgradeneeded = () => {
request.onupgradeneeded = async (ev) => {
const db = request.result;
if (ev.oldVersion === 0) {
const coValues = db.createObjectStore("coValues", {
autoIncrement: true,
keyPath: "rowID",
});
const coValues = db.createObjectStore("coValues", {
autoIncrement: true,
keyPath: "rowID",
});
coValues.createIndex("coValuesById", "id", {
unique: true,
});
const sessions = db.createObjectStore("sessions", {
autoIncrement: true,
keyPath: "rowID",
});
sessions.createIndex("sessionsByCoValue", "coValue");
sessions.createIndex(
"uniqueSessions",
["coValue", "sessionID"],
{
coValues.createIndex("coValuesById", "id", {
unique: true,
}
);
});
db.createObjectStore("transactions", {
keyPath: ["ses", "idx"],
});
const sessions = db.createObjectStore("sessions", {
autoIncrement: true,
keyPath: "rowID",
});
sessions.createIndex("sessionsByCoValue", "coValue");
sessions.createIndex(
"uniqueSessions",
["coValue", "sessionID"],
{
unique: true,
}
);
db.createObjectStore("transactions", {
keyPath: ["ses", "idx"],
});
}
if (ev.oldVersion <= 1) {
db.createObjectStore("signatureAfter", {
keyPath: ["ses", "idx"],
});
}
if (ev.oldVersion !== 0 && ev.oldVersion <= 3) {
// fix embarrassing off-by-one error for transaction indices
console.log("Migration: fixing off-by-one error");
const transaction = (ev.target as unknown as {transaction: IDBTransaction}).transaction;
const txsStore = transaction.objectStore("transactions");
const txs = await promised(txsStore.getAll());
for (const tx of txs) {
await promised(txsStore.delete([tx.ses, tx.idx]));
tx.idx -= 1;
await promised(txsStore.add(tx));
}
console.log("Migration: fixing off-by-one error - done");
}
};
});
@@ -147,10 +183,12 @@ export class IDBStorage {
coValues,
sessions,
transactions,
signatureAfter,
}: {
coValues: IDBObjectStore;
sessions: IDBObjectStore;
transactions: IDBObjectStore;
signatureAfter: IDBObjectStore;
},
asDependencyOf?: CojsonInternalTypes.RawCoID
) {
@@ -170,12 +208,14 @@ export class IDBStorage {
sessions: {},
};
const newContent: CojsonInternalTypes.NewContentMessage = {
action: "content",
id: theirKnown.id,
header: theirKnown.header ? undefined : coValueRow?.header,
new: {},
};
const newContentPieces: CojsonInternalTypes.NewContentMessage[] = [
{
action: "content",
id: theirKnown.id,
header: theirKnown.header ? undefined : coValueRow?.header,
new: {},
},
];
for (const sessionRow of allOurSessions) {
ourKnown.sessions[sessionRow.sessionID] = sessionRow.lastIdx;
@@ -187,6 +227,21 @@ export class IDBStorage {
const firstNewTxIdx =
theirKnown.sessions[sessionRow.sessionID] || 0;
const signaturesAndIdxs = await promised<SignatureAfterRow[]>(
signatureAfter.getAll(
IDBKeyRange.bound(
[sessionRow.rowID, firstNewTxIdx],
[sessionRow.rowID, Infinity]
)
)
);
console.log(
theirKnown.id,
"signaturesAndIdxs",
JSON.stringify(signaturesAndIdxs)
);
const newTxInSession = await promised<TransactionRow[]>(
transactions.getAll(
IDBKeyRange.bound(
@@ -196,36 +251,83 @@ export class IDBStorage {
)
);
newContent.new[sessionRow.sessionID] = {
after: firstNewTxIdx,
lastSignature: sessionRow.lastSignature,
newTransactions: newTxInSession.map((row) => row.tx),
};
let idx = firstNewTxIdx;
console.log(
theirKnown.id,
"newTxInSession",
newTxInSession.length
);
for (const tx of newTxInSession) {
let sessionEntry =
newContentPieces[newContentPieces.length - 1]!.new[
sessionRow.sessionID
];
if (!sessionEntry) {
sessionEntry = {
after: idx,
lastSignature: "WILL_BE_REPLACED" as Signature,
newTransactions: [],
};
newContentPieces[newContentPieces.length - 1]!.new[
sessionRow.sessionID
] = sessionEntry;
}
sessionEntry.newTransactions.push(tx.tx);
if (
signaturesAndIdxs[0] &&
idx === signaturesAndIdxs[0].idx
) {
sessionEntry.lastSignature =
signaturesAndIdxs[0].signature;
signaturesAndIdxs.shift();
newContentPieces.push({
action: "content",
id: theirKnown.id,
new: {},
});
} else if (
idx ===
firstNewTxIdx + newTxInSession.length - 1
) {
sessionEntry.lastSignature = sessionRow.lastSignature;
}
idx += 1;
}
}
}
const dependedOnCoValues =
coValueRow?.header.ruleset.type === "group"
? Object.values(newContent.new).flatMap((sessionEntry) =>
sessionEntry.newTransactions.flatMap((tx) => {
if (tx.privacy !== "trusting") return [];
return tx.changes
.map(
(change) =>
change &&
typeof change === "object" &&
"op" in change &&
change.op === "set" &&
"key" in change &&
change.key
)
.filter(
(key): key is CojsonInternalTypes.RawCoID =>
typeof key === "string" &&
key.startsWith("co_")
);
})
)
? newContentPieces
.flatMap((piece) => Object.values(piece.new))
.flatMap((sessionEntry) =>
sessionEntry.newTransactions.flatMap((tx) => {
if (tx.privacy !== "trusting") return [];
// TODO: avoid parse here?
return cojsonInternals
.parseJSON(tx.changes)
.map(
(change) =>
change &&
typeof change === "object" &&
"op" in change &&
change.op === "set" &&
"key" in change &&
change.key
)
.filter(
(
key
): key is CojsonInternalTypes.RawCoID =>
typeof key === "string" &&
key.startsWith("co_")
);
})
)
: coValueRow?.header.ruleset.type === "ownedByGroup"
? [coValueRow?.header.ruleset.group]
: [];
@@ -233,7 +335,7 @@ export class IDBStorage {
for (const dependedOnCoValue of dependedOnCoValues) {
await this.sendNewContentAfter(
{ id: dependedOnCoValue, header: false, sessions: {} },
{ coValues, sessions, transactions },
{ coValues, sessions, transactions, signatureAfter },
asDependencyOf || theirKnown.id
);
}
@@ -244,8 +346,15 @@ export class IDBStorage {
asDependencyOf,
});
if (newContent.header || Object.keys(newContent.new).length > 0) {
await this.toLocalNode.write(newContent);
const nonEmptyNewContentPieces = newContentPieces.filter(
(piece) => piece.header || Object.keys(piece.new).length > 0
);
console.log(theirKnown.id, nonEmptyNewContentPieces);
for (const piece of nonEmptyNewContentPieces) {
await this.toLocalNode.write(piece);
await new Promise((resolve) => setTimeout(resolve, 0));
}
}
@@ -254,7 +363,7 @@ export class IDBStorage {
}
async handleContent(msg: CojsonInternalTypes.NewContentMessage) {
const { coValues, sessions, transactions } =
const { coValues, sessions, transactions, signatureAfter } =
this.inTransaction("readwrite");
let storedCoValueRowID = (
@@ -325,18 +434,39 @@ export class IDBStorage {
const actuallyNewOffset =
(sessionRow?.lastIdx || 0) -
(msg.new[sessionID]?.after || 0);
const actuallyNewTransactions =
newTransactions.slice(actuallyNewOffset);
let newBytesSinceLastSignature =
(sessionRow?.bytesSinceLastSignature || 0) +
actuallyNewTransactions.reduce(
(sum, tx) =>
sum +
(tx.privacy === "private"
? tx.encryptedChanges.length
: tx.changes.length),
0
);
const newLastIdx =
(sessionRow?.lastIdx || 0) + actuallyNewTransactions.length;
let shouldWriteSignature = false;
if (newBytesSinceLastSignature > MAX_RECOMMENDED_TX_SIZE) {
shouldWriteSignature = true;
newBytesSinceLastSignature = 0;
}
let nextIdx = sessionRow?.lastIdx || 0;
const sessionUpdate = {
coValue: storedCoValueRowID,
sessionID: sessionID,
lastIdx:
(sessionRow?.lastIdx || 0) +
actuallyNewTransactions.length,
lastIdx: newLastIdx,
lastSignature: msg.new[sessionID]!.lastSignature,
bytesSinceLastSignature: newBytesSinceLastSignature,
};
const sessionRowID = (await promised(
@@ -350,8 +480,18 @@ export class IDBStorage {
)
)) as number;
if (shouldWriteSignature) {
await promised(
signatureAfter.put({
ses: sessionRowID,
// TODO: newLastIdx is a misnomer, it's actually more like nextIdx or length
idx: newLastIdx - 1,
signature: msg.new[sessionID]!.lastSignature,
} satisfies SignatureAfterRow)
);
}
for (const newTransaction of actuallyNewTransactions) {
nextIdx++;
await promised(
transactions.add({
ses: sessionRowID,
@@ -359,6 +499,7 @@ export class IDBStorage {
tx: newTransaction,
} satisfies TransactionRow)
);
nextIdx++;
}
}
}
@@ -382,9 +523,10 @@ export class IDBStorage {
coValues: IDBObjectStore;
sessions: IDBObjectStore;
transactions: IDBObjectStore;
signatureAfter: IDBObjectStore;
} {
const tx = this.db.transaction(
["coValues", "sessions", "transactions"],
["coValues", "sessions", "transactions", "signatureAfter"],
mode
);
@@ -401,8 +543,9 @@ export class IDBStorage {
const coValues = tx.objectStore("coValues");
const sessions = tx.objectStore("sessions");
const transactions = tx.objectStore("transactions");
const signatureAfter = tx.objectStore("signatureAfter");
return { coValues, sessions, transactions };
return { coValues, sessions, transactions, signatureAfter };
}
}

View File

@@ -853,7 +853,7 @@
dependencies:
"@noble/hashes" "1.3.1"
"@noble/hashes@1.3.1", "@noble/hashes@^1.3.1":
"@noble/hashes@1.3.1":
version "1.3.1"
resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.3.1.tgz#8831ef002114670c603c458ab8b11328406953a9"
integrity sha512-EbqwksQwz9xDRGfDST86whPBgM65E0OH/pCgqW0GBVzO22bNE+NuIbeTb714+IfSjU3aRk47EUvXIb5bTsenKA==
@@ -1589,6 +1589,13 @@
resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812"
integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==
"@types/image-blob-reduce@^4.1.1":
version "4.1.1"
resolved "https://registry.yarnpkg.com/@types/image-blob-reduce/-/image-blob-reduce-4.1.1.tgz#3c04b47809fe5a69d652bebfc118cd74f65742bd"
integrity sha512-Oe2EPjW+iZSsXccxZPebqHqXAUaOLir3eQVqPx0ryXeJZdCZx+gYvWBZtqYEcluP6f3bll1m06ahT26bX0+LOg==
dependencies:
"@types/pica" "*"
"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1":
version "2.0.4"
resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44"
@@ -1641,6 +1648,11 @@
resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301"
integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==
"@types/pica@*":
version "9.0.1"
resolved "https://registry.yarnpkg.com/@types/pica/-/pica-9.0.1.tgz#adbdfc1190bb33a9da68d1fe501c2483dae3b142"
integrity sha512-hTsYxcy0MqIOKzeALuh3zOHyozBlndxV/bX9X52GBFq2XUQchZF6T0vcRYeT5P1ggmswi2LlIwHAH+bKWxxalg==
"@types/prop-types@*":
version "15.7.5"
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
@@ -3745,10 +3757,6 @@ fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-sta
resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
"fast-json-stable-stringify@https://github.com/tirithen/fast-json-stable-stringify#7a3dcf2":
version "2.0.0"
resolved "https://github.com/tirithen/fast-json-stable-stringify#7a3dcf2e086222fcee52d354d50a6a80dea97aed"
fast-levenshtein@^2.0.6:
version "2.0.6"
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
@@ -4219,6 +4227,11 @@ globby@11.1.0, globby@^11.1.0:
merge2 "^1.4.1"
slash "^3.0.0"
glur@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/glur/-/glur-1.1.2.tgz#f20ea36db103bfc292343921f1f91e83c3467689"
integrity sha512-l+8esYHTKOx2G/Aao4lEQ0bnHWg4fWtJbVoZZT9Knxi01pB8C80BR85nONLFwkkQoFRCmXY+BUcGZN3yZ2QsRA==
"got@^ 12.6.1":
version "12.6.1"
resolved "https://registry.yarnpkg.com/got/-/got-12.6.1.tgz#8869560d1383353204b5a9435f782df9c091f549"
@@ -4307,6 +4320,11 @@ has@^1.0.3:
dependencies:
function-bind "^1.1.1"
hash-wasm@^4.9.0:
version "4.9.0"
resolved "https://registry.yarnpkg.com/hash-wasm/-/hash-wasm-4.9.0.tgz#7e9dcc9f7d6bd0cc802f2a58f24edce999744206"
integrity sha512-7SW7ejyfnRxuOc7ptQHSf4LDoZaWOivfzqw+5rpcQku0nHfmicPKE51ra9BiRLAmT8+gGLestr1XroUkqdjL6w==
hdr-histogram-js@^2.0.1:
version "2.0.3"
resolved "https://registry.yarnpkg.com/hdr-histogram-js/-/hdr-histogram-js-2.0.3.tgz#0b860534655722b6e3f3e7dca7b78867cf43dcb5"
@@ -4448,6 +4466,13 @@ ignore@^5.0.4, ignore@^5.2.0, ignore@^5.2.4:
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324"
integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==
image-blob-reduce@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/image-blob-reduce/-/image-blob-reduce-4.1.0.tgz#45f1e146ceaa45079025febe307f9b1e8b6833c9"
integrity sha512-iljleP8Fr7tS1ezrAazWi30abNPYXtBGXb9R9oTZDWObqiKq18AQJGTUb0wkBOtdCZ36/IirkuuAIIHTjBJIjA==
dependencies:
pica "^9.0.0"
import-fresh@^3.2.1:
version "3.3.0"
resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b"
@@ -5917,6 +5942,14 @@ multimatch@5.0.0:
arrify "^2.0.1"
minimatch "^3.0.4"
multimath@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/multimath/-/multimath-2.0.0.tgz#0d37acf67c328f30e3d8c6b0d3209e6082710302"
integrity sha512-toRx66cAMJ+Ccz7pMIg38xSIrtnbozk0dchXezwQDMgQmbGpfxjtv68H+L00iFL8hxDaVjrmwAFSb3I6bg8Q2g==
dependencies:
glur "^1.1.2"
object-assign "^4.1.1"
mute-stream@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d"
@@ -6262,7 +6295,7 @@ nx@16.6.0, "nx@>=16.5.1 < 17":
"@nx/nx-win32-arm64-msvc" "16.6.0"
"@nx/nx-win32-x64-msvc" "16.6.0"
object-assign@^4.0.1:
object-assign@^4.0.1, object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
@@ -6614,6 +6647,16 @@ pend@~1.2.0:
resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==
pica@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/pica/-/pica-9.0.1.tgz#9ba5a5e81fc09dca9800abef9fb8388434b18b2f"
integrity sha512-v0U4vY6Z3ztz9b4jBIhCD3WYoecGXCQeCsYep+sXRefViL+mVVoTL+wqzdPeE+GpBFsRUtQZb6dltvAt2UkMtQ==
dependencies:
glur "^1.1.2"
multimath "^2.0.0"
object-assign "^4.1.1"
webworkify "^1.5.0"
picocolors@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
@@ -8092,11 +8135,6 @@ uri-js@^4.2.2:
dependencies:
punycode "^2.1.0"
use-debounce@^9.0.4:
version "9.0.4"
resolved "https://registry.yarnpkg.com/use-debounce/-/use-debounce-9.0.4.tgz#51d25d856fbdfeb537553972ce3943b897f1ac85"
integrity sha512-6X8H/mikbrt0XE8e+JXRtZ8yYVvKkdYRfmIhWZYsP8rcNs9hk3APV8Ua2mFkKRLcJKVdnX2/Vwrmg2GWKUQEaQ==
util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
@@ -8291,6 +8329,11 @@ webidl-conversions@^3.0.0:
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
webworkify@^1.5.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/webworkify/-/webworkify-1.5.0.tgz#734ad87a774de6ebdd546e1d3e027da5b8f4a42c"
integrity sha512-AMcUeyXAhbACL8S2hqqdqOLqvJ8ylmIbNwUIqQujRSouf4+eUFaXbG6F1Rbu+srlJMmxQWsiU7mOJi0nMBfM1g==
whatwg-url@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"