Compare commits
118 Commits
cojson-sto
...
jazz-tools
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
114898d8a9 | ||
|
|
962213c712 | ||
|
|
427df8fcbb | ||
|
|
c40aad55dc | ||
|
|
dfca5926de | ||
|
|
9815ec61f0 | ||
|
|
fca60d213e | ||
|
|
b4fdab475b | ||
|
|
958c122c36 | ||
|
|
5842838371 | ||
|
|
acd908fbc2 | ||
|
|
4e61d1d191 | ||
|
|
9f6079b6c6 | ||
|
|
4033d78fa6 | ||
|
|
83af94c850 | ||
|
|
70fe856713 | ||
|
|
42e4afc42b | ||
|
|
0e6797b222 | ||
|
|
3634eaf8e9 | ||
|
|
58dfda3d0f | ||
|
|
d304b0bcb5 | ||
|
|
44f5a3f5a2 | ||
|
|
ebb3ce1c25 | ||
|
|
a67bba0dcf | ||
|
|
4a72c26e42 | ||
|
|
084cb5936d | ||
|
|
8a3be85e97 | ||
|
|
1a7f2b7379 | ||
|
|
caac82dffd | ||
|
|
27b48378e5 | ||
|
|
cfd3c3ca5c | ||
|
|
41f26b7a4f | ||
|
|
c57ebb1cea | ||
|
|
259aded5cc | ||
|
|
1f5e091dd7 | ||
|
|
bbb1c44977 | ||
|
|
4327ecbfdf | ||
|
|
114c10bc77 | ||
|
|
cecdf29721 | ||
|
|
bd717fc0d7 | ||
|
|
739fff68b3 | ||
|
|
d49cab0afa | ||
|
|
ffebb4fdaf | ||
|
|
32565f0e53 | ||
|
|
61a5889bea | ||
|
|
82bd3e1ea6 | ||
|
|
b800a6fba2 | ||
|
|
1b6dbfdfff | ||
|
|
061a70f1b3 | ||
|
|
f1c1e0dafd | ||
|
|
c3912fdb37 | ||
|
|
356bfa4860 | ||
|
|
38446668c4 | ||
|
|
e2bb3b8015 | ||
|
|
11dcfd703d | ||
|
|
0b09d23bd1 | ||
|
|
879b726537 | ||
|
|
66bbd03262 | ||
|
|
c09b63698f | ||
|
|
bed7db0a33 | ||
|
|
8ff3e234c1 | ||
|
|
296da5a5c4 | ||
|
|
700a4f1ba1 | ||
|
|
6f6663d825 | ||
|
|
844cdc907f | ||
|
|
9e32d4cb92 | ||
|
|
85dc6ba148 | ||
|
|
16c4d27e00 | ||
|
|
69170fe0e0 | ||
|
|
a646ba54b3 | ||
|
|
45d60fc3c8 | ||
|
|
6f0c399ccd | ||
|
|
9b1d52d183 | ||
|
|
6247fac6c5 | ||
|
|
f27a2c541e | ||
|
|
2317a23fd4 | ||
|
|
26994684d7 | ||
|
|
14a5e036a4 | ||
|
|
5b1c1ca522 | ||
|
|
a9c8458c51 | ||
|
|
5f31d6cbe1 | ||
|
|
477fd8a62d | ||
|
|
90999ee709 | ||
|
|
38065f0cdf | ||
|
|
c77d16cdb3 | ||
|
|
9410084e6a | ||
|
|
e67c5838a9 | ||
|
|
a141cbc7f7 | ||
|
|
6a5352cf3a | ||
|
|
27762637ee | ||
|
|
dcebe34891 | ||
|
|
99d510815f | ||
|
|
928962c08b | ||
|
|
cdadd6db1d | ||
|
|
d45b8ae70b | ||
|
|
445a58c864 | ||
|
|
1895b474ea | ||
|
|
8990ff39a5 | ||
|
|
71e4c97255 | ||
|
|
577e960e28 | ||
|
|
f232f75d40 | ||
|
|
e1a7f829b4 | ||
|
|
f82177b9da | ||
|
|
c1c553bad0 | ||
|
|
588ea02f63 | ||
|
|
ddc69f2268 | ||
|
|
7c62689319 | ||
|
|
df7011167c | ||
|
|
28a785acb0 | ||
|
|
3ee557bfbe | ||
|
|
af94255166 | ||
|
|
4a0dea3f75 | ||
|
|
6a42bc9655 | ||
|
|
c6c8a7f6b7 | ||
|
|
133dd0e26d | ||
|
|
815339272f | ||
|
|
9c1f340029 | ||
|
|
b72ea9608d |
@@ -6,7 +6,6 @@
|
||||
"fixed": [
|
||||
[
|
||||
"cojson",
|
||||
"cojson-storage",
|
||||
"cojson-storage-indexeddb",
|
||||
"cojson-storage-sqlite",
|
||||
"cojson-transport-ws",
|
||||
|
||||
52
.github/workflows/playwright-homepage.yml
vendored
52
.github/workflows/playwright-homepage.yml
vendored
@@ -1,52 +0,0 @@
|
||||
name: Playwright Tests - Homepage
|
||||
|
||||
concurrency:
|
||||
# For pushes, this lets concurrent runs happen, so each push gets a result.
|
||||
# But for other events (e.g. PRs), we can cancel the previous runs.
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
timeout-minutes: 60
|
||||
runs-on: blacksmith-4vcpu-ubuntu-2404
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: true
|
||||
|
||||
- name: Setup Source Code
|
||||
uses: ./.github/actions/source-code/
|
||||
|
||||
- name: Install root dependencies
|
||||
run: pnpm install && pnpm exec turbo build --filter="./packages/*"
|
||||
|
||||
- name: Install project dependencies
|
||||
run: pnpm install
|
||||
working-directory: ./homepage/homepage
|
||||
|
||||
- name: Pnpm Build
|
||||
run: pnpm exec turbo build
|
||||
working-directory: ./homepage/homepage
|
||||
|
||||
- name: Install Playwright Browsers
|
||||
run: pnpm exec playwright install
|
||||
working-directory: ./homepage/homepage
|
||||
|
||||
- name: Run Playwright tests
|
||||
run: pnpm exec playwright test
|
||||
working-directory: ./homepage/homepage
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: failure()
|
||||
with:
|
||||
name: homepage-playwright-report
|
||||
path: ./homepage/homepage/playwright-report/
|
||||
retention-days: 30
|
||||
158
.github/workflows/playwright.yml
vendored
158
.github/workflows/playwright.yml
vendored
@@ -19,21 +19,7 @@ jobs:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
project: [
|
||||
"tests/e2e",
|
||||
"examples/chat",
|
||||
"examples/chat-svelte",
|
||||
"examples/clerk",
|
||||
"examples/betterauth",
|
||||
"examples/file-share-svelte",
|
||||
"examples/form",
|
||||
"examples/inspector",
|
||||
"examples/music-player",
|
||||
"examples/organization",
|
||||
"starters/react-passkey-auth",
|
||||
"starters/svelte-passkey-auth",
|
||||
"tests/jazz-svelte"
|
||||
]
|
||||
shard: ["1/2", "2/2"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -43,25 +29,129 @@ jobs:
|
||||
- name: Setup Source Code
|
||||
uses: ./.github/actions/source-code/
|
||||
|
||||
- name: Pnpm Build
|
||||
run: |
|
||||
if [ -f .env.test ]; then
|
||||
cp .env.test .env
|
||||
fi
|
||||
pnpm turbo build
|
||||
working-directory: ./${{ matrix.project }}
|
||||
|
||||
- name: Install Playwright Browsers
|
||||
run: pnpm exec playwright install
|
||||
working-directory: ./${{ matrix.project }}
|
||||
|
||||
- name: Run Playwright tests
|
||||
run: pnpm exec playwright test
|
||||
working-directory: ./${{ matrix.project }}
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: failure()
|
||||
with:
|
||||
name: ${{ hashFiles(format('{0}/package.json', matrix.project)) }}-playwright-report
|
||||
path: ./${{ matrix.project }}/playwright-report/
|
||||
retention-days: 30
|
||||
- name: Run Playwright tests for shard ${{ matrix.shard }}
|
||||
run: |
|
||||
# Parse shard information (e.g., "1/2" -> shard_num=1, total_shards=2)
|
||||
IFS='/' read -r shard_num total_shards <<< "${{ matrix.shard }}"
|
||||
shard_index=$((shard_num - 1)) # Convert to 0-based index
|
||||
|
||||
# Debug: Print parsed values
|
||||
echo "Parsed shard_num: $shard_num"
|
||||
echo "Parsed total_shards: $total_shards"
|
||||
echo "Calculated shard_index: $shard_index"
|
||||
|
||||
# Define all projects to test
|
||||
all_projects=(
|
||||
"tests/e2e"
|
||||
"examples/chat"
|
||||
"examples/chat-svelte"
|
||||
"examples/clerk"
|
||||
"examples/betterauth"
|
||||
"examples/file-share-svelte"
|
||||
"examples/form"
|
||||
"examples/inspector"
|
||||
"examples/music-player"
|
||||
"examples/organization"
|
||||
"starters/react-passkey-auth"
|
||||
"starters/svelte-passkey-auth"
|
||||
"tests/jazz-svelte"
|
||||
)
|
||||
|
||||
# Calculate which projects this shard should run
|
||||
shard_projects=()
|
||||
for i in "${!all_projects[@]}"; do
|
||||
if [ $((i % total_shards)) -eq $shard_index ]; then
|
||||
shard_projects+=("${all_projects[i]}")
|
||||
fi
|
||||
done
|
||||
|
||||
# Track project results
|
||||
overall_exit_code=0
|
||||
failed_projects=()
|
||||
passed_projects=()
|
||||
|
||||
echo "=== Running tests for shard ${{ matrix.shard }} ==="
|
||||
echo "Projects in this shard:"
|
||||
printf '%s\n' "${shard_projects[@]}"
|
||||
echo
|
||||
|
||||
# Run tests for each project
|
||||
for project in "${shard_projects[@]}"; do
|
||||
echo "=== Testing project: $project ==="
|
||||
|
||||
# Check if project directory exists
|
||||
if [ ! -d "$project" ]; then
|
||||
echo "❌ FAILED: Project directory $project does not exist"
|
||||
failed_projects+=("$project (directory not found)")
|
||||
overall_exit_code=1
|
||||
continue
|
||||
fi
|
||||
|
||||
# Check if project has package.json
|
||||
if [ ! -f "$project/package.json" ]; then
|
||||
echo "❌ FAILED: No package.json found in $project"
|
||||
failed_projects+=("$project (no package.json)")
|
||||
overall_exit_code=1
|
||||
continue
|
||||
fi
|
||||
|
||||
# Build the project
|
||||
echo "🔨 Building $project..."
|
||||
cd "$project"
|
||||
|
||||
if [ -f .env.test ]; then
|
||||
cp .env.test .env
|
||||
fi
|
||||
|
||||
if ! pnpm turbo build; then
|
||||
echo "❌ BUILD FAILED: $project"
|
||||
failed_projects+=("$project (build failed)")
|
||||
overall_exit_code=1
|
||||
cd - > /dev/null
|
||||
continue
|
||||
fi
|
||||
|
||||
# Run Playwright tests
|
||||
echo "🧪 Running Playwright tests for $project..."
|
||||
if ! pnpm exec playwright test; then
|
||||
echo "❌ TESTS FAILED: $project"
|
||||
failed_projects+=("$project (tests failed)")
|
||||
overall_exit_code=1
|
||||
else
|
||||
echo "✅ TESTS PASSED: $project"
|
||||
passed_projects+=("$project")
|
||||
fi
|
||||
|
||||
cd - > /dev/null
|
||||
echo "=== Finished testing $project ==="
|
||||
echo
|
||||
done
|
||||
|
||||
# Print summary report
|
||||
echo "=========================================="
|
||||
echo "📊 TEST SUMMARY FOR SHARD ${{ matrix.shard }}"
|
||||
echo "=========================================="
|
||||
|
||||
if [ ${#passed_projects[@]} -gt 0 ]; then
|
||||
echo "✅ PASSED (${#passed_projects[@]}):"
|
||||
printf ' - %s\n' "${passed_projects[@]}"
|
||||
echo
|
||||
fi
|
||||
|
||||
if [ ${#failed_projects[@]} -gt 0 ]; then
|
||||
echo "❌ FAILED (${#failed_projects[@]}):"
|
||||
printf ' - %s\n' "${failed_projects[@]}"
|
||||
echo
|
||||
fi
|
||||
|
||||
|
||||
echo "Total projects in shard: ${#shard_projects[@]}"
|
||||
echo "Passed: ${#passed_projects[@]}"
|
||||
echo "Failed: ${#failed_projects[@]}"
|
||||
echo "=========================================="
|
||||
|
||||
# Exit with overall status
|
||||
exit $overall_exit_code
|
||||
|
||||
@@ -56,7 +56,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"include": ["packages/cojson-storage*/**", "cojson-transport-ws/**"],
|
||||
"include": ["packages/cojson/src/storage/*/**", "cojson-transport-ws/**"],
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
|
||||
@@ -13,13 +13,13 @@
|
||||
"@bacons/text-decoder": "^0.0.0",
|
||||
"@bam.tech/react-native-image-resizer": "^3.0.11",
|
||||
"@react-native-community/netinfo": "11.4.1",
|
||||
"expo": "~53.0.9",
|
||||
"expo": "54.0.0-canary-20250701-6a945c5",
|
||||
"expo-clipboard": "^7.1.4",
|
||||
"expo-secure-store": "~14.2.3",
|
||||
"expo-sqlite": "~15.2.10",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-native": "0.79.2",
|
||||
"react": "19.1.0",
|
||||
"react-native": "0.80.0",
|
||||
"react-native-get-random-values": "^1.11.0",
|
||||
"readable-stream": "^4.7.0"
|
||||
},
|
||||
@@ -29,4 +29,4 @@
|
||||
"typescript": "~5.8.3"
|
||||
},
|
||||
"private": true
|
||||
}
|
||||
}
|
||||
@@ -11,11 +11,11 @@ react {
|
||||
// The root of your project, i.e. where "package.json" lives. Default is '../..'
|
||||
// root = file("../../")
|
||||
// The folder where the react-native NPM package is. Default is ../../node_modules/react-native
|
||||
// reactNativeDir = file("../../node_modules/react-native")
|
||||
reactNativeDir = file("../../../../node_modules/react-native")
|
||||
// The folder where the react-native Codegen package is. Default is ../../node_modules/@react-native/codegen
|
||||
// codegenDir = file("../../node_modules/@react-native/codegen")
|
||||
codegenDir = file("../../../../node_modules/@react-native/codegen")
|
||||
// The cli.js file which is the React Native CLI entrypoint. Default is ../../node_modules/react-native/cli.js
|
||||
// cliFile = file("../../node_modules/react-native/cli.js")
|
||||
cliFile = file("../../../../node_modules/react-native/cli.js")
|
||||
|
||||
/* Variants */
|
||||
// The list of variants to that are debuggable. For those we're going to
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
pluginManagement { includeBuild("../node_modules/@react-native/gradle-plugin") }
|
||||
pluginManagement { includeBuild("../../../node_modules/@react-native/gradle-plugin") }
|
||||
plugins { id("com.facebook.react.settings") }
|
||||
extensions.configure(com.facebook.react.ReactSettingsExtension){ ex -> ex.autolinkLibrariesFromCommand() }
|
||||
rootProject.name = 'ChatRN'
|
||||
include ':app'
|
||||
includeBuild('../node_modules/@react-native/gradle-plugin')
|
||||
includeBuild('../../../node_modules/@react-native/gradle-plugin')
|
||||
|
||||
@@ -380,7 +380,7 @@
|
||||
"$(inherited)",
|
||||
" ",
|
||||
);
|
||||
REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native";
|
||||
REACT_NATIVE_PATH = "${PODS_ROOT}/../../../../node_modules/react-native";
|
||||
SDKROOT = iphoneos;
|
||||
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) DEBUG";
|
||||
USE_HERMES = true;
|
||||
@@ -452,7 +452,7 @@
|
||||
"$(inherited)",
|
||||
" ",
|
||||
);
|
||||
REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native";
|
||||
REACT_NATIVE_PATH = "${PODS_ROOT}/../../../../node_modules/react-native";
|
||||
SDKROOT = iphoneos;
|
||||
USE_HERMES = true;
|
||||
VALIDATE_PRODUCT = YES;
|
||||
|
||||
@@ -2370,87 +2370,87 @@ PODS:
|
||||
- Yoga (0.0.0)
|
||||
|
||||
DEPENDENCIES:
|
||||
- boost (from `../node_modules/react-native/third-party-podspecs/boost.podspec`)
|
||||
- DoubleConversion (from `../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec`)
|
||||
- fast_float (from `../node_modules/react-native/third-party-podspecs/fast_float.podspec`)
|
||||
- FBLazyVector (from `../node_modules/react-native/Libraries/FBLazyVector`)
|
||||
- fmt (from `../node_modules/react-native/third-party-podspecs/fmt.podspec`)
|
||||
- glog (from `../node_modules/react-native/third-party-podspecs/glog.podspec`)
|
||||
- hermes-engine (from `../node_modules/react-native/sdks/hermes-engine/hermes-engine.podspec`)
|
||||
- boost (from `../../../node_modules/react-native/third-party-podspecs/boost.podspec`)
|
||||
- DoubleConversion (from `../../../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec`)
|
||||
- fast_float (from `../../../node_modules/react-native/third-party-podspecs/fast_float.podspec`)
|
||||
- FBLazyVector (from `../../../node_modules/react-native/Libraries/FBLazyVector`)
|
||||
- fmt (from `../../../node_modules/react-native/third-party-podspecs/fmt.podspec`)
|
||||
- glog (from `../../../node_modules/react-native/third-party-podspecs/glog.podspec`)
|
||||
- hermes-engine (from `../../../node_modules/react-native/sdks/hermes-engine/hermes-engine.podspec`)
|
||||
- "op-sqlite (from `../../../node_modules/@op-engineering/op-sqlite`)"
|
||||
- RCT-Folly (from `../node_modules/react-native/third-party-podspecs/RCT-Folly.podspec`)
|
||||
- RCTDeprecation (from `../node_modules/react-native/ReactApple/Libraries/RCTFoundation/RCTDeprecation`)
|
||||
- RCTRequired (from `../node_modules/react-native/Libraries/Required`)
|
||||
- RCTTypeSafety (from `../node_modules/react-native/Libraries/TypeSafety`)
|
||||
- React (from `../node_modules/react-native/`)
|
||||
- React-callinvoker (from `../node_modules/react-native/ReactCommon/callinvoker`)
|
||||
- React-Core (from `../node_modules/react-native/`)
|
||||
- React-Core/RCTWebSocket (from `../node_modules/react-native/`)
|
||||
- React-CoreModules (from `../node_modules/react-native/React/CoreModules`)
|
||||
- React-cxxreact (from `../node_modules/react-native/ReactCommon/cxxreact`)
|
||||
- React-debug (from `../node_modules/react-native/ReactCommon/react/debug`)
|
||||
- React-defaultsnativemodule (from `../node_modules/react-native/ReactCommon/react/nativemodule/defaults`)
|
||||
- React-domnativemodule (from `../node_modules/react-native/ReactCommon/react/nativemodule/dom`)
|
||||
- React-Fabric (from `../node_modules/react-native/ReactCommon`)
|
||||
- React-FabricComponents (from `../node_modules/react-native/ReactCommon`)
|
||||
- React-FabricImage (from `../node_modules/react-native/ReactCommon`)
|
||||
- React-featureflags (from `../node_modules/react-native/ReactCommon/react/featureflags`)
|
||||
- React-featureflagsnativemodule (from `../node_modules/react-native/ReactCommon/react/nativemodule/featureflags`)
|
||||
- React-graphics (from `../node_modules/react-native/ReactCommon/react/renderer/graphics`)
|
||||
- React-hermes (from `../node_modules/react-native/ReactCommon/hermes`)
|
||||
- React-idlecallbacksnativemodule (from `../node_modules/react-native/ReactCommon/react/nativemodule/idlecallbacks`)
|
||||
- React-ImageManager (from `../node_modules/react-native/ReactCommon/react/renderer/imagemanager/platform/ios`)
|
||||
- React-jserrorhandler (from `../node_modules/react-native/ReactCommon/jserrorhandler`)
|
||||
- React-jsi (from `../node_modules/react-native/ReactCommon/jsi`)
|
||||
- React-jsiexecutor (from `../node_modules/react-native/ReactCommon/jsiexecutor`)
|
||||
- React-jsinspector (from `../node_modules/react-native/ReactCommon/jsinspector-modern`)
|
||||
- React-jsinspectorcdp (from `../node_modules/react-native/ReactCommon/jsinspector-modern/cdp`)
|
||||
- React-jsinspectornetwork (from `../node_modules/react-native/ReactCommon/jsinspector-modern/network`)
|
||||
- React-jsinspectortracing (from `../node_modules/react-native/ReactCommon/jsinspector-modern/tracing`)
|
||||
- React-jsitooling (from `../node_modules/react-native/ReactCommon/jsitooling`)
|
||||
- React-jsitracing (from `../node_modules/react-native/ReactCommon/hermes/executor/`)
|
||||
- React-logger (from `../node_modules/react-native/ReactCommon/logger`)
|
||||
- React-Mapbuffer (from `../node_modules/react-native/ReactCommon`)
|
||||
- React-microtasksnativemodule (from `../node_modules/react-native/ReactCommon/react/nativemodule/microtasks`)
|
||||
- RCT-Folly (from `../../../node_modules/react-native/third-party-podspecs/RCT-Folly.podspec`)
|
||||
- RCTDeprecation (from `../../../node_modules/react-native/ReactApple/Libraries/RCTFoundation/RCTDeprecation`)
|
||||
- RCTRequired (from `../../../node_modules/react-native/Libraries/Required`)
|
||||
- RCTTypeSafety (from `../../../node_modules/react-native/Libraries/TypeSafety`)
|
||||
- React (from `../../../node_modules/react-native/`)
|
||||
- React-callinvoker (from `../../../node_modules/react-native/ReactCommon/callinvoker`)
|
||||
- React-Core (from `../../../node_modules/react-native/`)
|
||||
- React-Core/RCTWebSocket (from `../../../node_modules/react-native/`)
|
||||
- React-CoreModules (from `../../../node_modules/react-native/React/CoreModules`)
|
||||
- React-cxxreact (from `../../../node_modules/react-native/ReactCommon/cxxreact`)
|
||||
- React-debug (from `../../../node_modules/react-native/ReactCommon/react/debug`)
|
||||
- React-defaultsnativemodule (from `../../../node_modules/react-native/ReactCommon/react/nativemodule/defaults`)
|
||||
- React-domnativemodule (from `../../../node_modules/react-native/ReactCommon/react/nativemodule/dom`)
|
||||
- React-Fabric (from `../../../node_modules/react-native/ReactCommon`)
|
||||
- React-FabricComponents (from `../../../node_modules/react-native/ReactCommon`)
|
||||
- React-FabricImage (from `../../../node_modules/react-native/ReactCommon`)
|
||||
- React-featureflags (from `../../../node_modules/react-native/ReactCommon/react/featureflags`)
|
||||
- React-featureflagsnativemodule (from `../../../node_modules/react-native/ReactCommon/react/nativemodule/featureflags`)
|
||||
- React-graphics (from `../../../node_modules/react-native/ReactCommon/react/renderer/graphics`)
|
||||
- React-hermes (from `../../../node_modules/react-native/ReactCommon/hermes`)
|
||||
- React-idlecallbacksnativemodule (from `../../../node_modules/react-native/ReactCommon/react/nativemodule/idlecallbacks`)
|
||||
- React-ImageManager (from `../../../node_modules/react-native/ReactCommon/react/renderer/imagemanager/platform/ios`)
|
||||
- React-jserrorhandler (from `../../../node_modules/react-native/ReactCommon/jserrorhandler`)
|
||||
- React-jsi (from `../../../node_modules/react-native/ReactCommon/jsi`)
|
||||
- React-jsiexecutor (from `../../../node_modules/react-native/ReactCommon/jsiexecutor`)
|
||||
- React-jsinspector (from `../../../node_modules/react-native/ReactCommon/jsinspector-modern`)
|
||||
- React-jsinspectorcdp (from `../../../node_modules/react-native/ReactCommon/jsinspector-modern/cdp`)
|
||||
- React-jsinspectornetwork (from `../../../node_modules/react-native/ReactCommon/jsinspector-modern/network`)
|
||||
- React-jsinspectortracing (from `../../../node_modules/react-native/ReactCommon/jsinspector-modern/tracing`)
|
||||
- React-jsitooling (from `../../../node_modules/react-native/ReactCommon/jsitooling`)
|
||||
- React-jsitracing (from `../../../node_modules/react-native/ReactCommon/hermes/executor/`)
|
||||
- React-logger (from `../../../node_modules/react-native/ReactCommon/logger`)
|
||||
- React-Mapbuffer (from `../../../node_modules/react-native/ReactCommon`)
|
||||
- React-microtasksnativemodule (from `../../../node_modules/react-native/ReactCommon/react/nativemodule/microtasks`)
|
||||
- react-native-get-random-values (from `../../../node_modules/react-native-get-random-values`)
|
||||
- react-native-mmkv (from `../../../node_modules/react-native-mmkv`)
|
||||
- "react-native-netinfo (from `../../../node_modules/@react-native-community/netinfo`)"
|
||||
- react-native-safe-area-context (from `../node_modules/react-native-safe-area-context`)
|
||||
- React-NativeModulesApple (from `../node_modules/react-native/ReactCommon/react/nativemodule/core/platform/ios`)
|
||||
- React-oscompat (from `../node_modules/react-native/ReactCommon/oscompat`)
|
||||
- React-perflogger (from `../node_modules/react-native/ReactCommon/reactperflogger`)
|
||||
- React-performancetimeline (from `../node_modules/react-native/ReactCommon/react/performance/timeline`)
|
||||
- React-RCTActionSheet (from `../node_modules/react-native/Libraries/ActionSheetIOS`)
|
||||
- React-RCTAnimation (from `../node_modules/react-native/Libraries/NativeAnimation`)
|
||||
- React-RCTAppDelegate (from `../node_modules/react-native/Libraries/AppDelegate`)
|
||||
- React-RCTBlob (from `../node_modules/react-native/Libraries/Blob`)
|
||||
- React-RCTFabric (from `../node_modules/react-native/React`)
|
||||
- React-RCTFBReactNativeSpec (from `../node_modules/react-native/React`)
|
||||
- React-RCTImage (from `../node_modules/react-native/Libraries/Image`)
|
||||
- React-RCTLinking (from `../node_modules/react-native/Libraries/LinkingIOS`)
|
||||
- React-RCTNetwork (from `../node_modules/react-native/Libraries/Network`)
|
||||
- React-RCTRuntime (from `../node_modules/react-native/React/Runtime`)
|
||||
- React-RCTSettings (from `../node_modules/react-native/Libraries/Settings`)
|
||||
- React-RCTText (from `../node_modules/react-native/Libraries/Text`)
|
||||
- React-RCTVibration (from `../node_modules/react-native/Libraries/Vibration`)
|
||||
- React-rendererconsistency (from `../node_modules/react-native/ReactCommon/react/renderer/consistency`)
|
||||
- React-renderercss (from `../node_modules/react-native/ReactCommon/react/renderer/css`)
|
||||
- React-rendererdebug (from `../node_modules/react-native/ReactCommon/react/renderer/debug`)
|
||||
- React-rncore (from `../node_modules/react-native/ReactCommon`)
|
||||
- React-RuntimeApple (from `../node_modules/react-native/ReactCommon/react/runtime/platform/ios`)
|
||||
- React-RuntimeCore (from `../node_modules/react-native/ReactCommon/react/runtime`)
|
||||
- React-runtimeexecutor (from `../node_modules/react-native/ReactCommon/runtimeexecutor`)
|
||||
- React-RuntimeHermes (from `../node_modules/react-native/ReactCommon/react/runtime`)
|
||||
- React-runtimescheduler (from `../node_modules/react-native/ReactCommon/react/renderer/runtimescheduler`)
|
||||
- React-timing (from `../node_modules/react-native/ReactCommon/react/timing`)
|
||||
- React-utils (from `../node_modules/react-native/ReactCommon/react/utils`)
|
||||
- react-native-safe-area-context (from `../../../node_modules/react-native-safe-area-context`)
|
||||
- React-NativeModulesApple (from `../../../node_modules/react-native/ReactCommon/react/nativemodule/core/platform/ios`)
|
||||
- React-oscompat (from `../../../node_modules/react-native/ReactCommon/oscompat`)
|
||||
- React-perflogger (from `../../../node_modules/react-native/ReactCommon/reactperflogger`)
|
||||
- React-performancetimeline (from `../../../node_modules/react-native/ReactCommon/react/performance/timeline`)
|
||||
- React-RCTActionSheet (from `../../../node_modules/react-native/Libraries/ActionSheetIOS`)
|
||||
- React-RCTAnimation (from `../../../node_modules/react-native/Libraries/NativeAnimation`)
|
||||
- React-RCTAppDelegate (from `../../../node_modules/react-native/Libraries/AppDelegate`)
|
||||
- React-RCTBlob (from `../../../node_modules/react-native/Libraries/Blob`)
|
||||
- React-RCTFabric (from `../../../node_modules/react-native/React`)
|
||||
- React-RCTFBReactNativeSpec (from `../../../node_modules/react-native/React`)
|
||||
- React-RCTImage (from `../../../node_modules/react-native/Libraries/Image`)
|
||||
- React-RCTLinking (from `../../../node_modules/react-native/Libraries/LinkingIOS`)
|
||||
- React-RCTNetwork (from `../../../node_modules/react-native/Libraries/Network`)
|
||||
- React-RCTRuntime (from `../../../node_modules/react-native/React/Runtime`)
|
||||
- React-RCTSettings (from `../../../node_modules/react-native/Libraries/Settings`)
|
||||
- React-RCTText (from `../../../node_modules/react-native/Libraries/Text`)
|
||||
- React-RCTVibration (from `../../../node_modules/react-native/Libraries/Vibration`)
|
||||
- React-rendererconsistency (from `../../../node_modules/react-native/ReactCommon/react/renderer/consistency`)
|
||||
- React-renderercss (from `../../../node_modules/react-native/ReactCommon/react/renderer/css`)
|
||||
- React-rendererdebug (from `../../../node_modules/react-native/ReactCommon/react/renderer/debug`)
|
||||
- React-rncore (from `../../../node_modules/react-native/ReactCommon`)
|
||||
- React-RuntimeApple (from `../../../node_modules/react-native/ReactCommon/react/runtime/platform/ios`)
|
||||
- React-RuntimeCore (from `../../../node_modules/react-native/ReactCommon/react/runtime`)
|
||||
- React-runtimeexecutor (from `../../../node_modules/react-native/ReactCommon/runtimeexecutor`)
|
||||
- React-RuntimeHermes (from `../../../node_modules/react-native/ReactCommon/react/runtime`)
|
||||
- React-runtimescheduler (from `../../../node_modules/react-native/ReactCommon/react/renderer/runtimescheduler`)
|
||||
- React-timing (from `../../../node_modules/react-native/ReactCommon/react/timing`)
|
||||
- React-utils (from `../../../node_modules/react-native/ReactCommon/react/utils`)
|
||||
- ReactAppDependencyProvider (from `build/generated/ios`)
|
||||
- ReactCodegen (from `build/generated/ios`)
|
||||
- ReactCommon/turbomodule/core (from `../node_modules/react-native/ReactCommon`)
|
||||
- ReactCommon/turbomodule/core (from `../../../node_modules/react-native/ReactCommon`)
|
||||
- "RNCClipboard (from `../../../node_modules/@react-native-clipboard/clipboard`)"
|
||||
- RNScreens (from `../node_modules/react-native-screens`)
|
||||
- RNScreens (from `../../../node_modules/react-native-screens`)
|
||||
- SocketRocket (~> 0.7.1)
|
||||
- Yoga (from `../node_modules/react-native/ReactCommon/yoga`)
|
||||
- Yoga (from `../../../node_modules/react-native/ReactCommon/yoga`)
|
||||
|
||||
SPEC REPOS:
|
||||
trunk:
|
||||
@@ -2458,88 +2458,88 @@ SPEC REPOS:
|
||||
|
||||
EXTERNAL SOURCES:
|
||||
boost:
|
||||
:podspec: "../node_modules/react-native/third-party-podspecs/boost.podspec"
|
||||
:podspec: "../../../node_modules/react-native/third-party-podspecs/boost.podspec"
|
||||
DoubleConversion:
|
||||
:podspec: "../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec"
|
||||
:podspec: "../../../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec"
|
||||
fast_float:
|
||||
:podspec: "../node_modules/react-native/third-party-podspecs/fast_float.podspec"
|
||||
:podspec: "../../../node_modules/react-native/third-party-podspecs/fast_float.podspec"
|
||||
FBLazyVector:
|
||||
:path: "../node_modules/react-native/Libraries/FBLazyVector"
|
||||
:path: "../../../node_modules/react-native/Libraries/FBLazyVector"
|
||||
fmt:
|
||||
:podspec: "../node_modules/react-native/third-party-podspecs/fmt.podspec"
|
||||
:podspec: "../../../node_modules/react-native/third-party-podspecs/fmt.podspec"
|
||||
glog:
|
||||
:podspec: "../node_modules/react-native/third-party-podspecs/glog.podspec"
|
||||
:podspec: "../../../node_modules/react-native/third-party-podspecs/glog.podspec"
|
||||
hermes-engine:
|
||||
:podspec: "../node_modules/react-native/sdks/hermes-engine/hermes-engine.podspec"
|
||||
:podspec: "../../../node_modules/react-native/sdks/hermes-engine/hermes-engine.podspec"
|
||||
:tag: hermes-2025-05-06-RNv0.80.0-4eb6132a5bf0450bf4c6c91987675381d7ac8bca
|
||||
op-sqlite:
|
||||
:path: "../../../node_modules/@op-engineering/op-sqlite"
|
||||
RCT-Folly:
|
||||
:podspec: "../node_modules/react-native/third-party-podspecs/RCT-Folly.podspec"
|
||||
:podspec: "../../../node_modules/react-native/third-party-podspecs/RCT-Folly.podspec"
|
||||
RCTDeprecation:
|
||||
:path: "../node_modules/react-native/ReactApple/Libraries/RCTFoundation/RCTDeprecation"
|
||||
:path: "../../../node_modules/react-native/ReactApple/Libraries/RCTFoundation/RCTDeprecation"
|
||||
RCTRequired:
|
||||
:path: "../node_modules/react-native/Libraries/Required"
|
||||
:path: "../../../node_modules/react-native/Libraries/Required"
|
||||
RCTTypeSafety:
|
||||
:path: "../node_modules/react-native/Libraries/TypeSafety"
|
||||
:path: "../../../node_modules/react-native/Libraries/TypeSafety"
|
||||
React:
|
||||
:path: "../node_modules/react-native/"
|
||||
:path: "../../../node_modules/react-native/"
|
||||
React-callinvoker:
|
||||
:path: "../node_modules/react-native/ReactCommon/callinvoker"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/callinvoker"
|
||||
React-Core:
|
||||
:path: "../node_modules/react-native/"
|
||||
:path: "../../../node_modules/react-native/"
|
||||
React-CoreModules:
|
||||
:path: "../node_modules/react-native/React/CoreModules"
|
||||
:path: "../../../node_modules/react-native/React/CoreModules"
|
||||
React-cxxreact:
|
||||
:path: "../node_modules/react-native/ReactCommon/cxxreact"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/cxxreact"
|
||||
React-debug:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/debug"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/debug"
|
||||
React-defaultsnativemodule:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/nativemodule/defaults"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/nativemodule/defaults"
|
||||
React-domnativemodule:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/nativemodule/dom"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/nativemodule/dom"
|
||||
React-Fabric:
|
||||
:path: "../node_modules/react-native/ReactCommon"
|
||||
:path: "../../../node_modules/react-native/ReactCommon"
|
||||
React-FabricComponents:
|
||||
:path: "../node_modules/react-native/ReactCommon"
|
||||
:path: "../../../node_modules/react-native/ReactCommon"
|
||||
React-FabricImage:
|
||||
:path: "../node_modules/react-native/ReactCommon"
|
||||
:path: "../../../node_modules/react-native/ReactCommon"
|
||||
React-featureflags:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/featureflags"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/featureflags"
|
||||
React-featureflagsnativemodule:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/nativemodule/featureflags"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/nativemodule/featureflags"
|
||||
React-graphics:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/renderer/graphics"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/renderer/graphics"
|
||||
React-hermes:
|
||||
:path: "../node_modules/react-native/ReactCommon/hermes"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/hermes"
|
||||
React-idlecallbacksnativemodule:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/nativemodule/idlecallbacks"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/nativemodule/idlecallbacks"
|
||||
React-ImageManager:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/renderer/imagemanager/platform/ios"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/renderer/imagemanager/platform/ios"
|
||||
React-jserrorhandler:
|
||||
:path: "../node_modules/react-native/ReactCommon/jserrorhandler"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jserrorhandler"
|
||||
React-jsi:
|
||||
:path: "../node_modules/react-native/ReactCommon/jsi"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jsi"
|
||||
React-jsiexecutor:
|
||||
:path: "../node_modules/react-native/ReactCommon/jsiexecutor"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jsiexecutor"
|
||||
React-jsinspector:
|
||||
:path: "../node_modules/react-native/ReactCommon/jsinspector-modern"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jsinspector-modern"
|
||||
React-jsinspectorcdp:
|
||||
:path: "../node_modules/react-native/ReactCommon/jsinspector-modern/cdp"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jsinspector-modern/cdp"
|
||||
React-jsinspectornetwork:
|
||||
:path: "../node_modules/react-native/ReactCommon/jsinspector-modern/network"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jsinspector-modern/network"
|
||||
React-jsinspectortracing:
|
||||
:path: "../node_modules/react-native/ReactCommon/jsinspector-modern/tracing"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jsinspector-modern/tracing"
|
||||
React-jsitooling:
|
||||
:path: "../node_modules/react-native/ReactCommon/jsitooling"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/jsitooling"
|
||||
React-jsitracing:
|
||||
:path: "../node_modules/react-native/ReactCommon/hermes/executor/"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/hermes/executor/"
|
||||
React-logger:
|
||||
:path: "../node_modules/react-native/ReactCommon/logger"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/logger"
|
||||
React-Mapbuffer:
|
||||
:path: "../node_modules/react-native/ReactCommon"
|
||||
:path: "../../../node_modules/react-native/ReactCommon"
|
||||
React-microtasksnativemodule:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/nativemodule/microtasks"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/nativemodule/microtasks"
|
||||
react-native-get-random-values:
|
||||
:path: "../../../node_modules/react-native-get-random-values"
|
||||
react-native-mmkv:
|
||||
@@ -2547,75 +2547,75 @@ EXTERNAL SOURCES:
|
||||
react-native-netinfo:
|
||||
:path: "../../../node_modules/@react-native-community/netinfo"
|
||||
react-native-safe-area-context:
|
||||
:path: "../node_modules/react-native-safe-area-context"
|
||||
:path: "../../../node_modules/react-native-safe-area-context"
|
||||
React-NativeModulesApple:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/nativemodule/core/platform/ios"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/nativemodule/core/platform/ios"
|
||||
React-oscompat:
|
||||
:path: "../node_modules/react-native/ReactCommon/oscompat"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/oscompat"
|
||||
React-perflogger:
|
||||
:path: "../node_modules/react-native/ReactCommon/reactperflogger"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/reactperflogger"
|
||||
React-performancetimeline:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/performance/timeline"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/performance/timeline"
|
||||
React-RCTActionSheet:
|
||||
:path: "../node_modules/react-native/Libraries/ActionSheetIOS"
|
||||
:path: "../../../node_modules/react-native/Libraries/ActionSheetIOS"
|
||||
React-RCTAnimation:
|
||||
:path: "../node_modules/react-native/Libraries/NativeAnimation"
|
||||
:path: "../../../node_modules/react-native/Libraries/NativeAnimation"
|
||||
React-RCTAppDelegate:
|
||||
:path: "../node_modules/react-native/Libraries/AppDelegate"
|
||||
:path: "../../../node_modules/react-native/Libraries/AppDelegate"
|
||||
React-RCTBlob:
|
||||
:path: "../node_modules/react-native/Libraries/Blob"
|
||||
:path: "../../../node_modules/react-native/Libraries/Blob"
|
||||
React-RCTFabric:
|
||||
:path: "../node_modules/react-native/React"
|
||||
:path: "../../../node_modules/react-native/React"
|
||||
React-RCTFBReactNativeSpec:
|
||||
:path: "../node_modules/react-native/React"
|
||||
:path: "../../../node_modules/react-native/React"
|
||||
React-RCTImage:
|
||||
:path: "../node_modules/react-native/Libraries/Image"
|
||||
:path: "../../../node_modules/react-native/Libraries/Image"
|
||||
React-RCTLinking:
|
||||
:path: "../node_modules/react-native/Libraries/LinkingIOS"
|
||||
:path: "../../../node_modules/react-native/Libraries/LinkingIOS"
|
||||
React-RCTNetwork:
|
||||
:path: "../node_modules/react-native/Libraries/Network"
|
||||
:path: "../../../node_modules/react-native/Libraries/Network"
|
||||
React-RCTRuntime:
|
||||
:path: "../node_modules/react-native/React/Runtime"
|
||||
:path: "../../../node_modules/react-native/React/Runtime"
|
||||
React-RCTSettings:
|
||||
:path: "../node_modules/react-native/Libraries/Settings"
|
||||
:path: "../../../node_modules/react-native/Libraries/Settings"
|
||||
React-RCTText:
|
||||
:path: "../node_modules/react-native/Libraries/Text"
|
||||
:path: "../../../node_modules/react-native/Libraries/Text"
|
||||
React-RCTVibration:
|
||||
:path: "../node_modules/react-native/Libraries/Vibration"
|
||||
:path: "../../../node_modules/react-native/Libraries/Vibration"
|
||||
React-rendererconsistency:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/renderer/consistency"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/renderer/consistency"
|
||||
React-renderercss:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/renderer/css"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/renderer/css"
|
||||
React-rendererdebug:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/renderer/debug"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/renderer/debug"
|
||||
React-rncore:
|
||||
:path: "../node_modules/react-native/ReactCommon"
|
||||
:path: "../../../node_modules/react-native/ReactCommon"
|
||||
React-RuntimeApple:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/runtime/platform/ios"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/runtime/platform/ios"
|
||||
React-RuntimeCore:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/runtime"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/runtime"
|
||||
React-runtimeexecutor:
|
||||
:path: "../node_modules/react-native/ReactCommon/runtimeexecutor"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/runtimeexecutor"
|
||||
React-RuntimeHermes:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/runtime"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/runtime"
|
||||
React-runtimescheduler:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/renderer/runtimescheduler"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/renderer/runtimescheduler"
|
||||
React-timing:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/timing"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/timing"
|
||||
React-utils:
|
||||
:path: "../node_modules/react-native/ReactCommon/react/utils"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/react/utils"
|
||||
ReactAppDependencyProvider:
|
||||
:path: build/generated/ios
|
||||
ReactCodegen:
|
||||
:path: build/generated/ios
|
||||
ReactCommon:
|
||||
:path: "../node_modules/react-native/ReactCommon"
|
||||
:path: "../../../node_modules/react-native/ReactCommon"
|
||||
RNCClipboard:
|
||||
:path: "../../../node_modules/@react-native-clipboard/clipboard"
|
||||
RNScreens:
|
||||
:path: "../node_modules/react-native-screens"
|
||||
:path: "../../../node_modules/react-native-screens"
|
||||
Yoga:
|
||||
:path: "../node_modules/react-native/ReactCommon/yoga"
|
||||
:path: "../../../node_modules/react-native/ReactCommon/yoga"
|
||||
|
||||
SPEC CHECKSUMS:
|
||||
boost: 7e761d76ca2ce687f7cc98e698152abd03a18f90
|
||||
@@ -2692,7 +2692,7 @@ SPEC CHECKSUMS:
|
||||
React-timing: a275a1c2e6112dba17f8f7dd496d439213bbea0d
|
||||
React-utils: 449a6e1fd53886510e284e80bdbb1b1c6db29452
|
||||
ReactAppDependencyProvider: 3267432b637c9b38e86961b287f784ee1b08dde0
|
||||
ReactCodegen: 5d41e1df061200130dd326e55cdfdf94b0289c6e
|
||||
ReactCodegen: d82f538f70f00484d418803f74b5a0ea09cc8689
|
||||
ReactCommon: b028d09a66e60ebd83ca59d8cc9a1216360db147
|
||||
RNCClipboard: 54ff19965d7c816febbafe5f520c2c3e7b677a49
|
||||
RNScreens: ee2abe7e0c548eed14e92742e81ed991165c56aa
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"@azure/core-asynciterator-polyfill": "^1.0.2",
|
||||
"@bacons/text-decoder": "0.0.0",
|
||||
"@op-engineering/op-sqlite": "14.1.0",
|
||||
"@react-native-clipboard/clipboard": "1.16.2",
|
||||
"@react-native-clipboard/clipboard": "1.16.3",
|
||||
"@react-native-community/netinfo": "11.4.1",
|
||||
"@react-navigation/native": "7.1.14",
|
||||
"@react-navigation/native-stack": "7.3.19",
|
||||
@@ -40,7 +40,7 @@
|
||||
"@react-native/typescript-config": "0.80.0",
|
||||
"@rnx-kit/metro-config": "^2.0.1",
|
||||
"@rnx-kit/metro-resolver-symlinks": "^0.2.5",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react": "^19.1.0",
|
||||
"eslint": "^8.19.0",
|
||||
"pod-install": "^0.3.5",
|
||||
"prettier": "2.8.8",
|
||||
|
||||
@@ -1,5 +1,34 @@
|
||||
# passkey-svelte
|
||||
|
||||
## 0.0.99
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [9815ec6]
|
||||
- Updated dependencies [b4fdab4]
|
||||
- jazz-tools@0.15.10
|
||||
|
||||
## 0.0.98
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [27b4837]
|
||||
- jazz-tools@0.15.9
|
||||
|
||||
## 0.0.97
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [3844666]
|
||||
- jazz-tools@0.15.8
|
||||
|
||||
## 0.0.96
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [c09b636]
|
||||
- jazz-tools@0.15.7
|
||||
|
||||
## 0.0.95
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "chat-svelte",
|
||||
"version": "0.0.95",
|
||||
"version": "0.0.99",
|
||||
"type": "module",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
|
||||
@@ -16,15 +16,15 @@
|
||||
"hash-slash": "workspace:*",
|
||||
"jazz-tools": "workspace:*",
|
||||
"lucide-react": "^0.274.0",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"zod": "3.25.28"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react-swc": "^3.10.1",
|
||||
"is-ci": "^3.0.1",
|
||||
"postcss": "^8.4.40",
|
||||
@@ -32,4 +32,4 @@
|
||||
"typescript": "5.6.2",
|
||||
"vite": "^6.3.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,15 +14,15 @@
|
||||
"@bam.tech/react-native-image-resizer": "^3.0.11",
|
||||
"@clerk/clerk-expo": "^2.13.1",
|
||||
"@react-native-community/netinfo": "11.4.1",
|
||||
"expo": "~53.0.9",
|
||||
"expo": "54.0.0-canary-20250701-6a945c5",
|
||||
"expo-crypto": "~14.1.5",
|
||||
"expo-linking": "~7.1.5",
|
||||
"expo-secure-store": "~14.2.3",
|
||||
"expo-sqlite": "~15.2.10",
|
||||
"expo-web-browser": "~14.2.0",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-native": "0.79.2",
|
||||
"react": "19.1.0",
|
||||
"react-native": "0.80.0",
|
||||
"react-native-get-random-values": "^1.11.0",
|
||||
"readable-stream": "^4.7.0"
|
||||
},
|
||||
@@ -32,4 +32,4 @@
|
||||
"typescript": "~5.8.3"
|
||||
},
|
||||
"private": true
|
||||
}
|
||||
}
|
||||
@@ -14,17 +14,17 @@
|
||||
"dependencies": {
|
||||
"@clerk/clerk-react": "^5.4.1",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"typescript": "5.6.2",
|
||||
"vite": "^6.3.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,14 +11,14 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"is-ci": "^3.0.1",
|
||||
|
||||
@@ -12,16 +12,16 @@
|
||||
"dependencies": {
|
||||
"hash-slash": "workspace:*",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@tailwindcss/forms": "^0.5.10",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"is-ci": "^3.0.1",
|
||||
|
||||
@@ -11,14 +11,14 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"typescript": "5.6.2",
|
||||
|
||||
@@ -17,15 +17,15 @@
|
||||
"cojson-transport-ws": "workspace:*",
|
||||
"hash-slash": "workspace:*",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"react-use": "^17.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react-swc": "^3.10.1",
|
||||
"postcss": "^8.4.40",
|
||||
"tailwindcss": "^4.1.10",
|
||||
|
||||
@@ -10,8 +10,8 @@
|
||||
"dependencies": {
|
||||
"jazz-tools": "workspace:*",
|
||||
"next": "15.3.2",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0"
|
||||
"react": "^19.1.0",
|
||||
"react-dom": "^19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
@@ -21,4 +21,4 @@
|
||||
"tailwindcss": "^4.1.10",
|
||||
"typescript": "^5"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -24,15 +24,15 @@
|
||||
"clsx": "^2.1.1",
|
||||
"jazz-tools": "workspace:*",
|
||||
"lucide-react": "^0.485.0",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"tailwind-merge": "^3.0.2",
|
||||
"tailwindcss": "^4.0.17",
|
||||
"tw-animate-css": "^1.2.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"jazz-run": "workspace:*",
|
||||
"npm-run-all": "^4.1.5",
|
||||
|
||||
@@ -13,14 +13,14 @@
|
||||
"dependencies": {
|
||||
"@react-spring/web": "^9.7.5",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"zod": "3.25.28"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"is-ci": "^3.0.1",
|
||||
|
||||
@@ -12,14 +12,14 @@
|
||||
"dependencies": {
|
||||
"@clerk/clerk-react": "^5.4.1",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"tailwindcss": "^4.1.10"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"typescript": "5.6.2",
|
||||
|
||||
@@ -23,8 +23,8 @@
|
||||
"clsx": "^2.1.1",
|
||||
"jazz-tools": "workspace:*",
|
||||
"lucide-react": "^0.274.0",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"react-router": "^6.16.0",
|
||||
"react-router-dom": "^6.16.0",
|
||||
"tailwind-merge": "^1.14.0"
|
||||
@@ -32,8 +32,8 @@
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react-swc": "^3.10.1",
|
||||
"postcss": "^8.4.27",
|
||||
"tailwindcss": "^4.1.10",
|
||||
|
||||
@@ -14,8 +14,8 @@
|
||||
"dependencies": {
|
||||
"jazz-tools": "workspace:*",
|
||||
"lucide-react": "^0.274.0",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"react-router": "^6.16.0",
|
||||
"react-router-dom": "^6.16.0"
|
||||
},
|
||||
@@ -24,8 +24,8 @@
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@tailwindcss/forms": "^0.5.10",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"postcss": "^8.4.40",
|
||||
|
||||
@@ -11,14 +11,14 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"tailwindcss": "^4.1.10"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"typescript": "5.6.2",
|
||||
|
||||
@@ -12,14 +12,14 @@
|
||||
"dependencies": {
|
||||
"hash-slash": "workspace:*",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"tailwindcss": "^4.1.10"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"typescript": "5.6.2",
|
||||
|
||||
@@ -19,15 +19,15 @@
|
||||
"prosemirror-schema-list": "^1.5.1",
|
||||
"prosemirror-state": "^1.4.3",
|
||||
"prosemirror-view": "^1.39.1",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"is-ci": "^3.0.1",
|
||||
|
||||
@@ -22,15 +22,15 @@
|
||||
"clsx": "^2.1.1",
|
||||
"jazz-tools": "workspace:*",
|
||||
"lucide-react": "^0.509.0",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"is-ci": "^3.0.1",
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@faker-js/faker": "^9.7.0",
|
||||
"@radix-ui/react-checkbox": "^1.3.2",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-toast": "^1.2.14",
|
||||
@@ -19,8 +18,8 @@
|
||||
"jazz-tools": "workspace:*",
|
||||
"lucide-react": "^0.274.0",
|
||||
"qrcode": "^1.5.3",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"react-router": "^6.16.0",
|
||||
"react-router-dom": "^6.16.0",
|
||||
"tailwind-merge": "^1.14.0",
|
||||
@@ -30,8 +29,8 @@
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/qrcode": "^1.5.1",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react-swc": "^3.10.1",
|
||||
"postcss": "^8.4.27",
|
||||
"tailwindcss": "^4.1.10",
|
||||
|
||||
@@ -17,12 +17,12 @@ import React from "react";
|
||||
import { TodoAccount, TodoProject } from "./1_schema.ts";
|
||||
import { NewProjectForm } from "./3_NewProjectForm.tsx";
|
||||
import { ProjectTodoTable } from "./4_ProjectTodoTable.tsx";
|
||||
import { apiKey } from "./apiKey.ts";
|
||||
import {
|
||||
Button,
|
||||
ThemeProvider,
|
||||
TitleAndLogo,
|
||||
} from "./basicComponents/index.ts";
|
||||
import { TaskGenerator } from "./components/TaskGenerator.tsx";
|
||||
import { wordlist } from "./wordlist.ts";
|
||||
|
||||
/**
|
||||
@@ -41,7 +41,7 @@ function JazzAndAuth({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<JazzReactProvider
|
||||
sync={{
|
||||
peer: `ws://localhost:4200`,
|
||||
peer: `wss://cloud.jazz.tools/?key=${apiKey}`,
|
||||
}}
|
||||
AccountSchema={TodoAccount}
|
||||
>
|
||||
@@ -92,10 +92,6 @@ export default function App() {
|
||||
path: "/invite/*",
|
||||
element: <p>Accepting invite...</p>,
|
||||
},
|
||||
{
|
||||
path: "/generate",
|
||||
element: <TaskGenerator />,
|
||||
},
|
||||
]);
|
||||
|
||||
// `useAcceptInvite()` is a hook that accepts an invite link from the URL hash,
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
import { TodoAccount } from "@/1_schema";
|
||||
import { FormEvent, useState } from "react";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import { generateRandomProject } from "../generate";
|
||||
|
||||
export function TaskGenerator() {
|
||||
const [isGenerating, setIsGenerating] = useState(false);
|
||||
const navigate = useNavigate();
|
||||
|
||||
const handleSubmit = async (e: FormEvent<HTMLFormElement>) => {
|
||||
e.preventDefault();
|
||||
const formData = new FormData(e.currentTarget);
|
||||
const numTasks = Math.max(
|
||||
1,
|
||||
parseInt(formData.get("numTasks") as string) || 1,
|
||||
);
|
||||
|
||||
setIsGenerating(true);
|
||||
const project = generateRandomProject(numTasks);
|
||||
|
||||
const { root } = await TodoAccount.getMe().ensureLoaded({
|
||||
resolve: {
|
||||
root: {
|
||||
projects: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
root.projects.push(project.value);
|
||||
|
||||
await project.done;
|
||||
|
||||
navigate(`/project/${project.value.id}`);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="p-4 border rounded-lg shadow-xs bg-white">
|
||||
<h2 className="text-lg font-semibold mb-4">Generate Random Tasks</h2>
|
||||
<form onSubmit={handleSubmit} className="flex flex-col gap-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<label htmlFor="numTasks" className="text-sm font-medium">
|
||||
Number of tasks:
|
||||
</label>
|
||||
<input
|
||||
id="numTasks"
|
||||
name="numTasks"
|
||||
type="number"
|
||||
min="1"
|
||||
defaultValue={5}
|
||||
className="w-20 px-2 py-1 border rounded"
|
||||
/>
|
||||
</div>
|
||||
<button
|
||||
type="submit"
|
||||
disabled={isGenerating}
|
||||
className="px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-600 disabled:bg-blue-300"
|
||||
>
|
||||
{isGenerating ? "Generating..." : "Generate Tasks"}
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -12,14 +12,14 @@
|
||||
"dependencies": {
|
||||
"@tailwindcss/forms": "^0.5.9",
|
||||
"jazz-tools": "workspace:*",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0"
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"globals": "^15.11.0",
|
||||
"tailwindcss": "^4.1.10",
|
||||
|
||||
@@ -27,28 +27,26 @@ export default function ButtonsPage() {
|
||||
return (
|
||||
<>
|
||||
<h3 className="text-lg mt-5 mb-2 font-bold">Variants</h3>
|
||||
<p className="mb-3">
|
||||
For compatibility the shadcn/ui variants are mapped to the design
|
||||
system.
|
||||
</p>
|
||||
|
||||
<p className="my-3">Buttons are styled with the variant prop.</p>
|
||||
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
<Button variant="default">default</Button>
|
||||
<Button variant="link">link</Button>
|
||||
<Button variant="ghost">ghost</Button>
|
||||
<Button variant="outline">outline</Button>
|
||||
<Button variant="secondary">secondary</Button>
|
||||
<Button variant="destructive">destructive</Button>
|
||||
</div>
|
||||
|
||||
<h3 className="text-lg mt-5 mb-2 font-bold">Intents</h3>
|
||||
<p>
|
||||
We have extended the shadcn/ui variants to include more styles via the
|
||||
intent prop.
|
||||
<h3 className="text-lg mt-5 font-bold">Intents</h3>
|
||||
<p className="my-3">
|
||||
We have extended the variants to include more styles via the intent
|
||||
prop.
|
||||
</p>
|
||||
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
{/* <Button intent="default">default</Button> */}
|
||||
<Button intent="default">default</Button>
|
||||
<Button intent="muted">muted</Button>
|
||||
<Button intent="strong">strong</Button>
|
||||
<Button intent="primary">primary</Button>
|
||||
<Button intent="tip">tip</Button>
|
||||
<Button intent="info">info</Button>
|
||||
@@ -56,8 +54,6 @@ export default function ButtonsPage() {
|
||||
<Button intent="warning">warning</Button>
|
||||
<Button intent="alert">alert</Button>
|
||||
<Button intent="danger">danger</Button>
|
||||
<Button intent="muted">muted</Button>
|
||||
<Button intent="strong">strong</Button>
|
||||
</div>
|
||||
|
||||
<div className="flex justify-between items-center w-48 mt-10">
|
||||
@@ -89,7 +85,7 @@ export default function ButtonsPage() {
|
||||
|
||||
<p className="text-sm mt-2 mb-5">
|
||||
<strong>NB:</strong> Variants and styles are interchangeable. See the
|
||||
intent on each variant with the dropdown
|
||||
intent on each variant with the dropdown.
|
||||
</p>
|
||||
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
@@ -107,9 +103,19 @@ export default function ButtonsPage() {
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<p className="my-3">
|
||||
For compatibility the shadcn/ui variants are mapped to the design
|
||||
system.
|
||||
</p>
|
||||
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
<Button variant="secondary">secondary</Button>
|
||||
<Button variant="destructive">destructive</Button>
|
||||
</div>
|
||||
|
||||
<h3 className="text-lg font-bold mt-5">Icons</h3>
|
||||
|
||||
<p>Buttons can also contain an icon and text.</p>
|
||||
<p className="my-3">Buttons can also contain an icon and text.</p>
|
||||
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
<Button
|
||||
@@ -130,7 +136,7 @@ export default function ButtonsPage() {
|
||||
>
|
||||
outline info with icon
|
||||
</Button>
|
||||
<p className="col-span-2">
|
||||
<p className="col-span-2 my-2">
|
||||
Or just use the icon prop with any of the button variants, style
|
||||
variants and colors.
|
||||
</p>
|
||||
@@ -151,6 +157,7 @@ const buttonPropsTableData = {
|
||||
{
|
||||
prop: "intent?",
|
||||
types: [
|
||||
"default",
|
||||
"primary",
|
||||
"tip",
|
||||
"info",
|
||||
@@ -174,7 +181,7 @@ const buttonPropsTableData = {
|
||||
"secondary",
|
||||
"destructive",
|
||||
],
|
||||
default: "undefined",
|
||||
default: "default",
|
||||
},
|
||||
{
|
||||
prop: "icon?",
|
||||
|
||||
@@ -159,8 +159,8 @@ const styleClasses = (intent: Style, variant: Variant | undefined) => {
|
||||
inverted: `${styleToTextMap[intent]} ${colorToBgHoverMap30[styleToColorMap[intent] as VariantColor]} ${colorToBgMap[styleToColorMap[intent] as VariantColor]} ${colorToBgActiveMap50[styleToColorMap[intent] as VariantColor]} ${shadowClassesBase}`,
|
||||
ghost: `bg-transparent ${styleToTextMap[intent]} ${colorToBgHoverMap10[styleToColorMap[intent] as VariantColor]} ${colorToBgActiveMap25[styleToColorMap[intent] as VariantColor]}`,
|
||||
link: `bg-transparent ${styleToTextMap[intent]} underline underline-offset-2 p-0 hover:bg-transparent ${styleToTextHoverMap[intent]} ${styleToTextActiveMap[intent]} active:underline-stone-500`,
|
||||
secondary: `bg-stone-300 ${styleToTextMap[intent]} hover:bg-stone-400/80 active:bg-stone-500/80`,
|
||||
destructive: `bg-danger text-white hover:bg-red/80 active:bg-red/70`,
|
||||
secondary: variantClass("muted"),
|
||||
destructive: variantClass("danger"),
|
||||
default: `${styleToBgGradientColorMap["default"]} ${styleToBgGradientHoverMap["default"]} ${textColorVariant("default")} ${styleToButtonStateMap["default"]} ${shadowClassesBase} shadow-stone-400/20`,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
ChevronLeftIcon,
|
||||
ChevronRight,
|
||||
ChevronRightIcon,
|
||||
ClipboardCheckIcon,
|
||||
ClipboardIcon,
|
||||
CodeIcon,
|
||||
Eye,
|
||||
@@ -66,6 +67,7 @@ export const icons = {
|
||||
close: XIcon,
|
||||
code: CodeIcon,
|
||||
copy: ClipboardIcon,
|
||||
copySuccess: ClipboardCheckIcon,
|
||||
cursor: MousePointer2Icon,
|
||||
darkTheme: MoonIcon,
|
||||
delete: TrashIcon,
|
||||
|
||||
@@ -11,7 +11,7 @@ export function CopyButton({
|
||||
onCopy,
|
||||
}: {
|
||||
code: string;
|
||||
size: "md" | "lg";
|
||||
size: "sm" | "md" | "lg";
|
||||
className?: string;
|
||||
onCopy?: () => void;
|
||||
}) {
|
||||
@@ -32,13 +32,13 @@ export function CopyButton({
|
||||
type="button"
|
||||
className={clsx(
|
||||
className,
|
||||
"group/button absolute overflow-hidden rounded text-2xs font-medium md:opacity-0 backdrop-blur transition md:focus:opacity-100 group-hover:opacity-100",
|
||||
"group/button absolute overflow-hidden rounded text-2xs font-medium md:opacity-0 backdrop-blur transition md:focus:opacity-100 group-hover:opacity-100 items-center align-middle p-0",
|
||||
copied
|
||||
? "bg-emerald-400/10 ring-1 ring-inset ring-emerald-400/20"
|
||||
? "bg-blue-400/10 ring-1 ring-inset ring-blue-400/20"
|
||||
: "bg-white/5 hover:bg-white/7.5 dark:bg-white/2.5 dark:hover:bg-white/5",
|
||||
size == "md"
|
||||
size === "md"
|
||||
? "right-[8.5px] top-[8.5px] py-[2px] pl-1 pr-2"
|
||||
: "right-2 top-2 py-1 pl-2 pr-3",
|
||||
: "right-2 top-2 py-1 pl-2 pr-2",
|
||||
)}
|
||||
onClick={() => {
|
||||
window.navigator.clipboard.writeText(code).then(() => {
|
||||
@@ -60,18 +60,22 @@ export function CopyButton({
|
||||
className={clsx(
|
||||
size === "md" ? "size-3" : "size-4",
|
||||
"stroke-stone-500 transition-colors group-hover/button:stroke-stone-600 dark:group-hover/button:stroke-stone-400",
|
||||
copied && "stroke-primary",
|
||||
)}
|
||||
/>
|
||||
Copy
|
||||
{size !== "sm" && "Copy"}
|
||||
</span>
|
||||
<span
|
||||
aria-hidden={!copied}
|
||||
className={clsx(
|
||||
"pointer-events-none absolute inset-0 flex items-center justify-center text-emerald-600 transition duration-300 dark:text-emerald-400",
|
||||
"pointer-events-none absolute inset-0 flex items-center justify-center text-primary transition duration-300",
|
||||
!copied && "translate-y-1.5 opacity-0",
|
||||
)}
|
||||
>
|
||||
Copied!
|
||||
{size === "sm" && (
|
||||
<Icon name="copySuccess" size="xs" className="stroke-primary" />
|
||||
)}
|
||||
{size !== "sm" && "Copied!"}
|
||||
</span>
|
||||
</button>
|
||||
);
|
||||
|
||||
@@ -44,7 +44,7 @@ export const Input = forwardRef<HTMLInputElement, InputProps>(
|
||||
: icon && iconPosition === "right";
|
||||
|
||||
const inputClassName = clsx(
|
||||
"w-full rounded-md border px-3.5 py-2 shadow-sm",
|
||||
"w-full rounded-md border px-2.5 py-1 shadow-sm h-[36px]",
|
||||
"font-medium text-stone-900",
|
||||
"dark:text-white dark:bg-stone-925",
|
||||
);
|
||||
|
||||
@@ -60,7 +60,7 @@ export function DropdownItem({
|
||||
let classes = clsx(
|
||||
className,
|
||||
// Base styles
|
||||
"group rounded-md space-x-2 focus:outline-none px-2.5 py-1.5",
|
||||
"group rounded-md space-x-2 focus:outline-none px-2.5 py-1.5",
|
||||
// Text styles
|
||||
"text-left text-sm/6 dark:text-white forced-colors:text-[CanvasText]",
|
||||
// Focus
|
||||
|
||||
@@ -21,8 +21,8 @@ export type Style =
|
||||
|
||||
export const sizeClasses = {
|
||||
sm: "text-sm py-1 px-2",
|
||||
md: "py-1.5 px-3",
|
||||
lg: "md:text-lg py-2 px-3 md:px-8 md:py-3",
|
||||
md: "py-1.5 px-3 h-[36px]",
|
||||
lg: "py-2 px-5 md:px-6 md:py-2.5",
|
||||
};
|
||||
|
||||
export const styleToBorderMap = {
|
||||
|
||||
@@ -42,15 +42,6 @@ export const team: Array<TeamMember> = [
|
||||
linkedin: "giordanoricci",
|
||||
image: "gio.jpg",
|
||||
},
|
||||
{
|
||||
name: "Trisha Lim",
|
||||
slug: "trisha",
|
||||
titles: ["Frontend Dev", "Marketing"],
|
||||
image: "trisha.png",
|
||||
location: "Lisbon, Portugal ",
|
||||
github: "trishalim",
|
||||
website: "https://trishalim.com",
|
||||
},
|
||||
{
|
||||
name: "Meg Culotta",
|
||||
slug: "meg",
|
||||
@@ -73,7 +64,7 @@ export const team: Array<TeamMember> = [
|
||||
name: "Sammii Kellow",
|
||||
slug: "sammii",
|
||||
location: "London, UK",
|
||||
titles: ["Design Engineer", "Marketing"],
|
||||
titles: ["Frontend & Design Engineer", "Marketing"],
|
||||
x: "SammiiHaylock",
|
||||
github: "sammii-hk",
|
||||
website: "https://sammii.dev",
|
||||
@@ -91,4 +82,25 @@ export const team: Array<TeamMember> = [
|
||||
linkedin: "boorad",
|
||||
image: "brad.png",
|
||||
},
|
||||
{
|
||||
name: "Divya S",
|
||||
slug: "div",
|
||||
location: "New York, US",
|
||||
titles: ["Platform Engineer"],
|
||||
x: "shortdiv",
|
||||
github: "shortdiv",
|
||||
website: "https://shortdiv.com",
|
||||
bluesky: "shortdiv.bsky.social",
|
||||
linkedin: "shortdiv",
|
||||
image: "div.jpg",
|
||||
},
|
||||
{
|
||||
name: "Nico Rainhart",
|
||||
slug: "nico",
|
||||
location: "Buenos Aires, Argentina",
|
||||
titles: ["Full-Stack Dev", "Framework Engineer"],
|
||||
image: "nico.jpeg",
|
||||
github: "nrainhart",
|
||||
linkedin: "nicolás-rainhart",
|
||||
},
|
||||
];
|
||||
|
||||
BIN homepage/gcmp/public/team/div.jpg (new binary file, 9.1 KiB, not shown)
BIN homepage/gcmp/public/team/nico.jpeg (new binary file, 279 KiB, not shown)
BIN (binary file removed, was 186 KiB, not shown)
@@ -4,6 +4,7 @@ import { ComingSoonSection } from "@/components/home/ComingSoonSection";
|
||||
import { EarlyAdopterSection } from "@/components/home/EarlyAdopterSection";
|
||||
import { EncryptionSection } from "@/components/home/EncryptionSection";
|
||||
import { FeaturesSection } from "@/components/home/FeaturesSection";
|
||||
import { GetStartedSnippetSelect } from "@/components/home/GetStartedSnippetSelect";
|
||||
import { HeroSection } from "@/components/home/HeroSection";
|
||||
import { HowJazzWorksSection } from "@/components/home/HowJazzWorksSection";
|
||||
import { LocalFirstFeaturesSection } from "@/components/home/LocalFirstFeaturesSection";
|
||||
@@ -16,7 +17,8 @@ export default function Home() {
|
||||
<>
|
||||
<HeroSection />
|
||||
|
||||
<div className="container flex flex-col gap-12 mt-12 lg:gap-20 lg:mt-20">
|
||||
<div className="container flex flex-col gap-12 lg:gap-20">
|
||||
<GetStartedSnippetSelect />
|
||||
<SupportedEnvironmentsSection />
|
||||
<HowJazzWorksSection />
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ export function SideNavItem({
|
||||
}) {
|
||||
const classes = clsx(
|
||||
className,
|
||||
"py-1 px-2 -mx-2 group rounded-md flex items-center transition-colors",
|
||||
"py-1 px-2 group rounded-md flex items-center transition-colors",
|
||||
);
|
||||
const path = usePathname();
|
||||
|
||||
@@ -28,7 +28,7 @@ export function SideNavItem({
|
||||
className={clsx(
|
||||
classes,
|
||||
path === href
|
||||
? "text-stone-900 font-medium bg-stone-100 dark:text-white dark:bg-stone-900"
|
||||
? "text-stone-900 font-medium bg-stone-200/50 dark:text-white dark:bg-stone-800/50"
|
||||
: "hover:text-stone-900 dark:hover:text-stone-200",
|
||||
)}
|
||||
>
|
||||
|
||||
@@ -10,10 +10,21 @@ import {
|
||||
DropdownItem,
|
||||
DropdownMenu,
|
||||
} from "@garden-co/design-system/src/components/organisms/Dropdown";
|
||||
import clsx from "clsx";
|
||||
import { usePathname, useRouter } from "next/navigation";
|
||||
import { useState } from "react";
|
||||
|
||||
export function FrameworkSelect() {
|
||||
export function FrameworkSelect({
|
||||
onSelect,
|
||||
size = "md",
|
||||
routerPush = true,
|
||||
className,
|
||||
}: {
|
||||
onSelect?: (framework: Framework) => void;
|
||||
size?: "sm" | "md";
|
||||
routerPush?: boolean;
|
||||
className?: string;
|
||||
}) {
|
||||
const router = useRouter();
|
||||
const defaultFramework = useFramework();
|
||||
const [selectedFramework, setSelectedFramework] =
|
||||
@@ -23,26 +34,26 @@ export function FrameworkSelect() {
|
||||
|
||||
const selectFramework = (newFramework: Framework) => {
|
||||
setSelectedFramework(newFramework);
|
||||
router.push(path.replace(defaultFramework, newFramework));
|
||||
onSelect && onSelect(newFramework);
|
||||
routerPush && router.push(path.replace(defaultFramework, newFramework));
|
||||
};
|
||||
|
||||
return (
|
||||
<Dropdown>
|
||||
<DropdownButton
|
||||
className="w-full justify-between"
|
||||
className={clsx("w-full justify-between overflow-hidden text-nowrap", size === "sm" && "text-sm", className)}
|
||||
as={Button}
|
||||
variant="outline"
|
||||
intent="default"
|
||||
>
|
||||
{frameworkNames[selectedFramework].label}
|
||||
<span className="text-nowrap max-w-full overflow-hidden text-ellipsis">{frameworkNames[selectedFramework].label}</span>
|
||||
<Icon name="chevronDown" size="sm" />
|
||||
</DropdownButton>
|
||||
<DropdownMenu className="w-[--button-width] z-50" anchor="bottom start">
|
||||
{Object.entries(frameworkNames)
|
||||
.filter(([_, framework]) => !framework.hidden)
|
||||
.map(([key, framework]) => (
|
||||
<DropdownItem
|
||||
className="items-baseline"
|
||||
className={clsx("items-baseline", size === "sm" && "text-xs text-nowrap", selectedFramework === key && "text-primary dark:text-primary")}
|
||||
key={key}
|
||||
onClick={() => selectFramework(key as Framework)}
|
||||
>
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
'use client'
|
||||
|
||||
import { Framework } from "@/content/framework";
|
||||
import { useFramework } from "@/lib/use-framework";
|
||||
import NpxCreateJazzApp from "@/components/home/NpxCreateJazzApp.mdx";
|
||||
import { CopyButton } from "@garden-co/design-system/src/components/molecules/CodeGroup";
|
||||
import { useState } from "react";
|
||||
import { Button } from "@garden-co/design-system/src/components/atoms/Button";
|
||||
import Link from "next/link";
|
||||
import { FrameworkSelect } from "../docs/FrameworkSelect";
|
||||
import clsx from "clsx";
|
||||
import { track } from "@vercel/analytics";
|
||||
import { GappedGrid } from "@garden-co/design-system/src/components/molecules/GappedGrid";
|
||||
|
||||
export function GetStartedSnippetSelect() {
|
||||
const defaultFramework = useFramework();
|
||||
const [selectedFramework, setSelectedFramework] =
|
||||
useState<Framework>(defaultFramework);
|
||||
|
||||
return (
|
||||
<GappedGrid>
|
||||
<div className="relative w-full col-span-2 lg:col-span-3 border-2 border-primary rounded-lg overflow-hidden">
|
||||
<CopyButton
|
||||
code="npx create-jazz-app@latest"
|
||||
size="sm"
|
||||
className={clsx("mt-0.5 mr-0.5 z-100 md:opacity-100 hidden md:block")}
|
||||
onCopy={() => track("create-jazz-app command copied from hero")}
|
||||
/>
|
||||
<NpxCreateJazzApp />
|
||||
</div>
|
||||
<div className="col-span-2 lg:col-span-3 flex flex-row gap-2">
|
||||
<div className="h-full items-center w-[175px]">
|
||||
<FrameworkSelect onSelect={setSelectedFramework} size="md" routerPush={false} className="h-full md:px-4" />
|
||||
</div>
|
||||
<div className="flex h-full items-center">
|
||||
<Button intent="primary" size="lg" className="w-full">
|
||||
<Link className="my-[0.11rem]" href={`/docs/${selectedFramework}`}>Get started</Link>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</GappedGrid>
|
||||
);
|
||||
}
|
||||
@@ -1,6 +1,5 @@
|
||||
"use client";
|
||||
|
||||
import CreateJazzApp from "@/components/home/CreateJazzApp.mdx";
|
||||
import { marketingCopy } from "@/content/marketingCopy";
|
||||
import { H1 } from "@garden-co/design-system/src/components/atoms/Headings";
|
||||
import {
|
||||
@@ -8,11 +7,10 @@ import {
|
||||
type IconName,
|
||||
} from "@garden-co/design-system/src/components/atoms/Icon";
|
||||
import { Kicker } from "@garden-co/design-system/src/components/atoms/Kicker";
|
||||
import { CopyButton } from "@garden-co/design-system/src/components/molecules/CodeGroup";
|
||||
import { Prose } from "@garden-co/design-system/src/components/molecules/Prose";
|
||||
import { SectionHeader } from "@garden-co/design-system/src/components/molecules/SectionHeader";
|
||||
import { track } from "@vercel/analytics";
|
||||
import Link from "next/link";
|
||||
import { GetStartedSnippetSelect } from "./GetStartedSnippetSelect";
|
||||
|
||||
|
||||
const features: Array<{
|
||||
title: string;
|
||||
@@ -54,8 +52,8 @@ const features: Array<{
|
||||
|
||||
export function HeroSection() {
|
||||
return (
|
||||
<div className="container grid items-center gap-x-8 gap-y-12 my-12 md:my-16 lg:my-24 lg:gap-x-10 lg:grid-cols-3">
|
||||
<div className="flex flex-col justify-center gap-5 lg:col-span-2 lg:gap-8">
|
||||
<div className="container grid items-center gap-x-8 gap-y-12 my-12 md:my-16 lg:my-24 lg:gap-x-10 lg:grid-cols-12">
|
||||
<div className="flex flex-col justify-center gap-5 lg:col-span-11 lg:gap-8">
|
||||
<Kicker>Toolkit for backendless apps</Kicker>
|
||||
<H1>
|
||||
<span className="inline-block text-highlight">
|
||||
@@ -94,31 +92,6 @@ export function HeroSection() {
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="h-full group grid md:grid-cols-2 items-center lg:grid-cols-1 lg:pt-36">
|
||||
<SectionHeader
|
||||
className="md:col-span-2 lg:sr-only"
|
||||
title="Get a Jazz app running in minutes."
|
||||
/>
|
||||
<div className="overflow-hidden sm:rounded-xl sm:border h-full sm:px-8 sm:pt-6 bg-stone-50 dark:bg-stone-950">
|
||||
<div className="rounded-lg bg-white dark:bg-stone-925 sm:ring-4 ring-stone-400/20 sm:shadow-xl sm:shadow-blue/20 border relative sm:top-2 h-full w-full">
|
||||
<div className="py-4 flex items-center gap-2.5 px-6 border-b">
|
||||
<span className="rounded-full size-3 bg-stone-200 dark:bg-stone-900" />
|
||||
<span className="rounded-full size-3 bg-stone-200 dark:bg-stone-900" />
|
||||
<span className="rounded-full size-3 bg-stone-200 dark:bg-stone-900" />
|
||||
<CopyButton
|
||||
code="npx create-jazz-app@latest"
|
||||
size="md"
|
||||
className="mt-0.5 mr-0.5"
|
||||
onCopy={() => track("create-jazz-app command copied from hero")}
|
||||
/>
|
||||
</div>
|
||||
<div className="p-3">
|
||||
<CreateJazzApp />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
3 homepage/homepage/components/home/NpxCreateJazzApp.mdx (new file)
@@ -0,0 +1,3 @@
|
||||
```sh
|
||||
npx create-jazz-app@latest
|
||||
```
|
||||
@@ -6,7 +6,7 @@ export const metadata = {
|
||||
|
||||
# Connecting CoValues with direct linking
|
||||
CoValues can form relationships with each other by **linking directly to other CoValues**. This creates a powerful connection where one CoValue can point to the unique identity of another.
|
||||
Instead of embedding all of the details of one coValue directly within another, you use its Jazz-Tools schema as the field type. This allows multiple CoValues to point to the same piece of data effortlessly.
|
||||
Instead of embedding all the details of one CoValue directly within another, you use its Jazz-Tools schema as the field type. This allows multiple CoValues to point to the same piece of data effortlessly.
|
||||
|
||||
<CodeGroup>
|
||||
```ts twoslash
|
||||
@@ -50,3 +50,51 @@ export type User = co.loaded<typeof User>;
|
||||
This direct linking approach offers a single source of truth. When you update a referenced CoValue, all other CoValues that point to it are automatically updated, ensuring data consistency across your application.
|
||||
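For example, a minimal sketch (not part of the original page; the `Order` and `Ticket` schemas and the `create` calls are illustrative assumptions) of two CoValues that link to the same `User`:

```ts
import { co, z } from "jazz-tools";

// Both schemas use the User schema itself as the field type, so each stores
// a reference to the same CoValue rather than an embedded copy.
const User = co.map({ name: z.string() });
const Order = co.map({ label: z.string(), customer: User });
const Ticket = co.map({ subject: z.string(), reporter: User });

const user = User.create({ name: "Alice" });
const order = Order.create({ label: "Order #1", customer: user });
const ticket = Ticket.create({ subject: "Billing question", reporter: user });

// Updating the referenced CoValue once is reflected everywhere it is linked.
user.name = "Alice Smith";
console.log(order.customer?.name, ticket.reporter?.name); // both "Alice Smith"
```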
|
||||
By connecting CoValues through these direct references, you can build robust and collaborative applications where data is consistent, efficient to manage, and relationships are clearly defined. The ability to link different CoValue types to the same underlying data is fundamental to building complex applications with Jazz.
|
||||
|
||||
|
||||
## Recursive references with DiscriminatedUnion
|
||||
In advanced schemas, you may want a CoValue that recursively references itself. For example, a `ReferenceItem` that contains a list of other items like `NoteItem` or `AttachmentItem`. This is common in tree-like structures such as threaded comments or nested project outlines.
|
||||
|
||||
You can model this with a Zod `z.discriminatedUnion`, but TypeScript’s type inference doesn't handle recursive unions well without a workaround.
|
||||
|
||||
Here’s how to structure your schema to avoid circular reference errors.
|
||||
|
||||
### Use this pattern for recursive discriminated unions
|
||||
<CodeGroup>
|
||||
```ts twoslash
|
||||
import { CoListSchema, co, z } from "jazz-tools";
|
||||
|
||||
// Recursive item modeling pattern using discriminated unions
|
||||
// First, define the non-recursive types
|
||||
export const NoteItem = co.map({
|
||||
type: z.literal("note"),
|
||||
internal: z.boolean(),
|
||||
content: co.plainText(),
|
||||
});
|
||||
|
||||
export const AttachmentItem = co.map({
|
||||
type: z.literal("attachment"),
|
||||
internal: z.boolean(),
|
||||
content: co.fileStream(),
|
||||
});
|
||||
|
||||
export const ReferenceItem = co.map({
|
||||
type: z.literal("reference"),
|
||||
internal: z.boolean(),
|
||||
content: z.string(),
|
||||
|
||||
// Workaround: declare the field type using CoListSchema and ZodDiscriminatedUnion so TS can safely recurse
|
||||
get children(): CoListSchema<z.ZodDiscriminatedUnion<[typeof NoteItem, typeof AttachmentItem, typeof ReferenceItem]>> {
|
||||
return ProjectContextItemList;
|
||||
},
|
||||
});
|
||||
|
||||
// Create the recursive union
|
||||
export const ProjectContextItem = z.discriminatedUnion("type", [NoteItem, AttachmentItem, ReferenceItem]);
|
||||
|
||||
// Final list of recursive types
|
||||
export const ProjectContextItemList = co.list(ProjectContextItem);
|
||||
```
|
||||
</CodeGroup>
|
||||
|
||||
Skipping this workaround may look like a shortcut, but TypeScript and Zod can't resolve the circular reference without it. Always define the discriminated union before introducing recursive links.
|
||||
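As a follow-up, a hypothetical usage sketch of the pattern above (the `create` calls and field access are assumptions based on the schema definitions, not part of the original page):

```ts
// Build a small tree with the recursive schema: a root reference item
// whose children list contains another reference item.
const leaf = ReferenceItem.create({
  type: "reference",
  internal: false,
  content: "A nested outline entry",
  children: ProjectContextItemList.create([]),
});

const root = ReferenceItem.create({
  type: "reference",
  internal: true,
  content: "Top-level outline entry",
  children: ProjectContextItemList.create([leaf]),
});

// The discriminated `type` field lets consumers narrow each child safely.
for (const item of root.children ?? []) {
  if (item?.type === "reference") {
    console.log(item.content);
  }
}
```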
|
||||
1543 homepage/pnpm-lock.yaml (generated; diff suppressed because it is too large)
@@ -4,7 +4,7 @@ packages:
|
||||
- "gcmp"
|
||||
|
||||
catalog:
|
||||
"react": "19.0.0"
|
||||
"react-dom": "19.0.0"
|
||||
"@types/react": "19.0.0"
|
||||
"@types/react-dom": "19.0.0"
|
||||
"react": "19.1.0"
|
||||
"react-dom": "19.1.0"
|
||||
"@types/react": "19.1.0"
|
||||
"@types/react-dom": "19.1.0"
|
||||
|
||||
@@ -39,6 +39,7 @@
|
||||
"changeset-version": "changeset version && pnpm i --no-frozen-lockfile",
|
||||
"release": "turbo run build --filter='./packages/*' && pnpm changeset publish && git push --follow-tags",
|
||||
"clean": "rm -rf ./packages/*/dist && rm -rf ./packages/*/node_modules && rm -rf ./examples/*/node_modules && rm -rf ./examples/*/dist",
|
||||
"postinstall": "lefthook install",
|
||||
"check-catalog-deps": "node scripts/check-catalog-deps.js"
|
||||
},
|
||||
"version": "0.0.0",
|
||||
@@ -50,10 +51,10 @@
|
||||
"ignoreMissing": ["@babel/*", "expo-modules-*", "typescript"]
|
||||
},
|
||||
"overrides": {
|
||||
"@types/react": "19.0.0",
|
||||
"@types/react-dom": "19.0.0",
|
||||
"react": "19.0.0",
|
||||
"react-dom": "19.0.0",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"vite": "6.3.5",
|
||||
"esbuild": "0.24.0"
|
||||
}
|
||||
|
||||
@@ -1,5 +1,33 @@
|
||||
# cojson-storage-indexeddb
|
||||
|
||||
## 0.15.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.10
|
||||
|
||||
## 0.15.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [27b4837]
|
||||
- Updated dependencies [2776263]
|
||||
- cojson@0.15.9
|
||||
|
||||
## 0.15.8
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.8
|
||||
- cojson-storage@0.15.8
|
||||
|
||||
## 0.15.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.7
|
||||
- cojson-storage@0.15.7
|
||||
|
||||
## 0.15.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
{
|
||||
"name": "cojson-storage-indexeddb",
|
||||
"version": "0.15.6",
|
||||
"version": "0.15.10",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cojson": "workspace:*",
|
||||
"cojson-storage": "workspace:*"
|
||||
"cojson": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "catalog:",
|
||||
|
||||
@@ -7,7 +7,7 @@ import type {
|
||||
StoredCoValueRow,
|
||||
StoredSessionRow,
|
||||
TransactionRow,
|
||||
} from "cojson-storage";
|
||||
} from "cojson";
|
||||
import { CoJsonIDBTransaction } from "./CoJsonIDBTransaction.js";
|
||||
|
||||
export class IDBClient implements DBClientInterfaceAsync {
|
||||
|
||||
@@ -1,10 +1,4 @@
|
||||
import {
|
||||
type IncomingSyncStream,
|
||||
type OutgoingSyncQueue,
|
||||
type Peer,
|
||||
cojsonInternals,
|
||||
} from "cojson";
|
||||
import { StorageManagerAsync } from "cojson-storage";
|
||||
import { StorageApiAsync } from "cojson";
|
||||
import { IDBClient } from "./idbClient.js";
|
||||
|
||||
let DATABASE_NAME = "jazz-storage";
|
||||
@@ -13,132 +7,50 @@ export function internal_setDatabaseName(name: string) {
|
||||
DATABASE_NAME = name;
|
||||
}
|
||||
|
||||
function createParallelOpsRunner() {
|
||||
const ops = new Set<Promise<unknown>>();
|
||||
export async function getIndexedDBStorage(name = DATABASE_NAME) {
|
||||
const dbPromise = new Promise<IDBDatabase>((resolve, reject) => {
|
||||
const request = indexedDB.open(name, 4);
|
||||
request.onerror = () => {
|
||||
reject(request.error);
|
||||
};
|
||||
request.onsuccess = () => {
|
||||
resolve(request.result);
|
||||
};
|
||||
request.onupgradeneeded = async (ev) => {
|
||||
const db = request.result;
|
||||
if (ev.oldVersion === 0) {
|
||||
const coValues = db.createObjectStore("coValues", {
|
||||
autoIncrement: true,
|
||||
keyPath: "rowID",
|
||||
});
|
||||
|
||||
return {
|
||||
add: (op: Promise<unknown>) => {
|
||||
ops.add(op);
|
||||
op.finally(() => {
|
||||
ops.delete(op);
|
||||
});
|
||||
},
|
||||
wait() {
|
||||
return Promise.race(ops);
|
||||
},
|
||||
get size() {
|
||||
return ops.size;
|
||||
},
|
||||
};
|
||||
}
|
||||
coValues.createIndex("coValuesById", "id", {
|
||||
unique: true,
|
||||
});
|
||||
|
||||
export class IDBNode {
|
||||
private readonly dbClient: IDBClient;
|
||||
private readonly syncManager: StorageManagerAsync;
|
||||
const sessions = db.createObjectStore("sessions", {
|
||||
autoIncrement: true,
|
||||
keyPath: "rowID",
|
||||
});
|
||||
|
||||
constructor(
|
||||
db: IDBDatabase,
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
this.dbClient = new IDBClient(db);
|
||||
this.syncManager = new StorageManagerAsync(this.dbClient, toLocalNode);
|
||||
sessions.createIndex("sessionsByCoValue", "coValue");
|
||||
sessions.createIndex("uniqueSessions", ["coValue", "sessionID"], {
|
||||
unique: true,
|
||||
});
|
||||
|
||||
const processMessages = async () => {
|
||||
const batch = createParallelOpsRunner();
|
||||
|
||||
for await (const msg of fromLocalNode) {
|
||||
try {
|
||||
if (msg === "Disconnected" || msg === "PingTimeout") {
|
||||
throw new Error("Unexpected Disconnected message");
|
||||
}
|
||||
|
||||
if (msg.action === "content") {
|
||||
await this.syncManager.handleSyncMessage(msg);
|
||||
} else {
|
||||
batch.add(this.syncManager.handleSyncMessage(msg));
|
||||
}
|
||||
|
||||
if (batch.size > 10) {
|
||||
await batch.wait();
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
db.createObjectStore("transactions", {
|
||||
keyPath: ["ses", "idx"],
|
||||
});
|
||||
}
|
||||
if (ev.oldVersion <= 1) {
|
||||
db.createObjectStore("signatureAfter", {
|
||||
keyPath: ["ses", "idx"],
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
processMessages().catch((e) =>
|
||||
console.error("Error in processMessages in IndexedDB", e),
|
||||
);
|
||||
}
|
||||
const db = await dbPromise;
|
||||
|
||||
static async asPeer(
|
||||
{ localNodeName = "local" }: { localNodeName?: string } | undefined = {
|
||||
localNodeName: "local",
|
||||
},
|
||||
): Promise<Peer> {
|
||||
const [localNodeAsPeer, storageAsPeer] = cojsonInternals.connectedPeers(
|
||||
localNodeName,
|
||||
"indexedDB",
|
||||
{
|
||||
peer1role: "client",
|
||||
peer2role: "storage",
|
||||
crashOnClose: true,
|
||||
},
|
||||
);
|
||||
|
||||
await IDBNode.open(localNodeAsPeer.incoming, localNodeAsPeer.outgoing);
|
||||
|
||||
return { ...storageAsPeer, priority: 100 };
|
||||
}
|
||||
|
||||
static async open(
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
const dbPromise = new Promise<IDBDatabase>((resolve, reject) => {
|
||||
const request = indexedDB.open(DATABASE_NAME, 4);
|
||||
request.onerror = () => {
|
||||
reject(request.error);
|
||||
};
|
||||
request.onsuccess = () => {
|
||||
resolve(request.result);
|
||||
};
|
||||
request.onupgradeneeded = async (ev) => {
|
||||
const db = request.result;
|
||||
if (ev.oldVersion === 0) {
|
||||
const coValues = db.createObjectStore("coValues", {
|
||||
autoIncrement: true,
|
||||
keyPath: "rowID",
|
||||
});
|
||||
|
||||
coValues.createIndex("coValuesById", "id", {
|
||||
unique: true,
|
||||
});
|
||||
|
||||
const sessions = db.createObjectStore("sessions", {
|
||||
autoIncrement: true,
|
||||
keyPath: "rowID",
|
||||
});
|
||||
|
||||
sessions.createIndex("sessionsByCoValue", "coValue");
|
||||
sessions.createIndex("uniqueSessions", ["coValue", "sessionID"], {
|
||||
unique: true,
|
||||
});
|
||||
|
||||
db.createObjectStore("transactions", {
|
||||
keyPath: ["ses", "idx"],
|
||||
});
|
||||
}
|
||||
if (ev.oldVersion <= 1) {
|
||||
db.createObjectStore("signatureAfter", {
|
||||
keyPath: ["ses", "idx"],
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
return new IDBNode(await dbPromise, fromLocalNode, toLocalNode);
|
||||
}
|
||||
return new StorageApiAsync(new IDBClient(db));
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
export {
|
||||
IDBNode,
|
||||
IDBNode as IDBStorage,
|
||||
internal_setDatabaseName,
|
||||
getIndexedDBStorage,
|
||||
} from "./idbNode.js";
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
import { LocalNode } from "cojson";
|
||||
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
|
||||
import { expect, test } from "vitest";
|
||||
import { IDBStorage } from "../index.js";
|
||||
|
||||
const Crypto = await WasmCrypto.create();
|
||||
|
||||
test("Should be able to initialize and load from empty DB", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node.syncManager.addPeer(await IDBStorage.asPeer({}));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(node.syncManager.peers.indexedDB).toBeDefined();
|
||||
});
|
||||
|
||||
test("Should be able to sync data to database and then load that from a new node", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node1.syncManager.addPeer(
|
||||
await IDBStorage.asPeer({ localNodeName: "node1" }),
|
||||
);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node2.syncManager.addPeer(
|
||||
await IDBStorage.asPeer({ localNodeName: "node2" }),
|
||||
);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
expect(map2.get("hello")).toBe("world");
|
||||
});
|
||||
@@ -1,8 +1,7 @@
|
||||
import { LocalNode } from "cojson";
|
||||
import { StorageManagerAsync } from "cojson-storage";
|
||||
import { LocalNode, StorageApiAsync } from "cojson";
|
||||
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
|
||||
import { afterEach, beforeEach, expect, test, vi } from "vitest";
|
||||
import { IDBStorage } from "../index.js";
|
||||
import { getIndexedDBStorage } from "../index.js";
|
||||
import { toSimplifiedMessages } from "./messagesTestUtils.js";
|
||||
import { trackMessages, waitFor } from "./testUtils.js";
|
||||
|
||||
@@ -17,22 +16,6 @@ afterEach(() => {
|
||||
syncMessages.restore();
|
||||
});
|
||||
|
||||
test("Should be able to initialize and load from empty DB", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node.syncManager.addPeer(await IDBStorage.asPeer());
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(node.syncManager.peers.indexedDB).toBeDefined();
|
||||
});
|
||||
|
||||
test("should sync and load data from storage", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
@@ -41,18 +24,14 @@ test("should sync and load data from storage", async () => {
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer = await IDBStorage.asPeer();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
await map.core.waitForSync();
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
@@ -65,9 +44,7 @@ test("should sync and load data from storage", async () => {
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -80,9 +57,7 @@ test("should sync and load data from storage", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer2 = await IDBStorage.asPeer();
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
@@ -103,9 +78,7 @@ test("should sync and load data from storage", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -119,15 +92,12 @@ test("should send an empty content message if there is no content", async () =>
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer = await IDBStorage.asPeer();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
await map.core.waitForSync();
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
@@ -140,9 +110,7 @@ test("should send an empty content message if there is no content", async () =>
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: ",
|
||||
"storage -> KNOWN Map sessions: header/0",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -155,9 +123,7 @@ test("should send an empty content message if there is no content", async () =>
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer2 = await IDBStorage.asPeer();
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
@@ -176,9 +142,7 @@ test("should send an empty content message if there is no content", async () =>
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: ",
|
||||
"client -> KNOWN Map sessions: header/0",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -192,10 +156,7 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer = await IDBStorage.asPeer();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
node1.setStorage(await getIndexedDBStorage());
|
||||
const group = node1.createGroup();
|
||||
const parentGroup = node1.createGroup();
|
||||
|
||||
@@ -205,7 +166,7 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
|
||||
map.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
await map.core.waitForSync();
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
@@ -218,12 +179,9 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"storage -> KNOWN ParentGroup sessions: header/4",
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"storage -> KNOWN Group sessions: header/5",
|
||||
"client -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -236,9 +194,7 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer2 = await IDBStorage.asPeer();
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(await getIndexedDBStorage());
|
||||
|
||||
await node2.load(map.id);
|
||||
|
||||
@@ -259,11 +215,8 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN ParentGroup sessions: header/4",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"client -> KNOWN Group sessions: header/5",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -277,9 +230,7 @@ test("should not send the same dependency value twice", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer = await IDBStorage.asPeer();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const group = node1.createGroup();
|
||||
const parentGroup = node1.createGroup();
|
||||
@@ -292,7 +243,8 @@ test("should not send the same dependency value twice", async () => {
|
||||
map.set("hello", "world");
|
||||
mapFromParent.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
await map.core.waitForSync();
|
||||
await mapFromParent.core.waitForSync();
|
||||
|
||||
syncMessages.clear();
|
||||
node1.gracefulShutdown();
|
||||
@@ -303,9 +255,7 @@ test("should not send the same dependency value twice", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer2 = await IDBStorage.asPeer();
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(await getIndexedDBStorage());
|
||||
|
||||
await node2.load(map.id);
|
||||
await node2.load(mapFromParent.id);
|
||||
@@ -329,14 +279,10 @@ test("should not send the same dependency value twice", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN ParentGroup sessions: header/4",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"client -> KNOWN Group sessions: header/5",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
"client -> LOAD MapFromParent sessions: empty",
|
||||
"storage -> CONTENT MapFromParent header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN MapFromParent sessions: header/1",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -350,9 +296,8 @@ test("should recover from data loss", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer = await IDBStorage.asPeer();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
const storage = await getIndexedDBStorage();
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
@@ -360,22 +305,25 @@ test("should recover from data loss", async () => {
|
||||
|
||||
map.set("0", 0);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
await map.core.waitForSync();
|
||||
|
||||
const mock = vi
|
||||
.spyOn(StorageManagerAsync.prototype, "handleSyncMessage")
|
||||
.mockImplementation(() => Promise.resolve());
|
||||
.spyOn(StorageApiAsync.prototype, "store")
|
||||
.mockImplementation(() => Promise.resolve(undefined));
|
||||
|
||||
map.set("1", 1);
|
||||
map.set("2", 2);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
const knownState = storage.getKnownState(map.id);
|
||||
Object.assign(knownState, map.core.knownState());
|
||||
|
||||
mock.mockReset();
|
||||
|
||||
map.set("3", 3);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
await map.core.waitForSync();
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
@@ -388,13 +336,10 @@ test("should recover from data loss", async () => {
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
"client -> CONTENT Map header: false new: After: 3 New: 1",
|
||||
"storage -> KNOWN CORRECTION Map sessions: header/1",
|
||||
"storage -> KNOWN CORRECTION Map sessions: header/4",
|
||||
"client -> CONTENT Map header: false new: After: 1 New: 3",
|
||||
"storage -> KNOWN Map sessions: header/4",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -407,9 +352,7 @@ test("should recover from data loss", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const peer2 = await IDBStorage.asPeer();
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
|
||||
@@ -436,9 +379,7 @@ test("should recover from data loss", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN Map sessions: header/4",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -452,7 +393,7 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node1.syncManager.addPeer(await IDBStorage.asPeer());
|
||||
node1.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
@@ -460,7 +401,7 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
|
||||
map.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
await map.core.waitForSync();
|
||||
|
||||
node1.gracefulShutdown();
|
||||
|
||||
@@ -470,7 +411,7 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node2.syncManager.addPeer(await IDBStorage.asPeer());
|
||||
node2.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
@@ -493,7 +434,7 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
|
||||
syncMessages.clear();
|
||||
|
||||
node3.syncManager.addPeer(await IDBStorage.asPeer());
|
||||
node3.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const map3 = await node3.load(map.id);
|
||||
if (map3 === "unavailable") {
|
||||
@@ -514,9 +455,7 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1 | After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/2",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -530,7 +469,7 @@ test("large coValue upload streaming", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node1.syncManager.addPeer(await IDBStorage.asPeer());
|
||||
node1.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const group = node1.createGroup();
|
||||
const largeMap = group.createMap();
|
||||
@@ -547,6 +486,7 @@ test("large coValue upload streaming", async () => {
|
||||
largeMap.set(key, value, "trusting");
|
||||
}
|
||||
|
||||
// TODO: Wait for storage to be updated
|
||||
await largeMap.core.waitForSync();
|
||||
|
||||
const knownState = largeMap.core.knownState();
|
||||
@@ -561,7 +501,7 @@ test("large coValue upload streaming", async () => {
|
||||
|
||||
syncMessages.clear();
|
||||
|
||||
node2.syncManager.addPeer(await IDBStorage.asPeer());
|
||||
node2.setStorage(await getIndexedDBStorage());
|
||||
|
||||
const largeMapOnNode2 = await node2.load(largeMap.id);
|
||||
|
||||
@@ -586,15 +526,10 @@ test("large coValue upload streaming", async () => {
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> KNOWN Map sessions: header/200",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 97",
|
||||
"client -> KNOWN Map sessions: header/97",
|
||||
"storage -> CONTENT Map header: true new: After: 97 New: 97",
|
||||
"client -> KNOWN Map sessions: header/194",
|
||||
"storage -> CONTENT Map header: true new: After: 194 New: 6",
|
||||
"client -> KNOWN Map sessions: header/200",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -605,7 +540,7 @@ test("should sync and load accounts from storage", async () => {
|
||||
const { node: node1, accountID } = await LocalNode.withNewlyCreatedAccount({
|
||||
crypto: Crypto,
|
||||
initialAgentSecret: agentSecret,
|
||||
peersToLoadFrom: [await IDBStorage.asPeer()],
|
||||
storage: await getIndexedDBStorage(),
|
||||
creationProps: {
|
||||
name: "test",
|
||||
},
|
||||
@@ -615,8 +550,6 @@ test("should sync and load accounts from storage", async () => {
|
||||
const profile = node1.expectProfileLoaded(accountID);
|
||||
const profileGroup = profile.group;
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
@@ -629,11 +562,8 @@ test("should sync and load accounts from storage", async () => {
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Account header: true new: After: 0 New: 4",
|
||||
"storage -> KNOWN Account sessions: header/4",
|
||||
"client -> CONTENT ProfileGroup header: true new: After: 0 New: 5",
|
||||
"storage -> KNOWN ProfileGroup sessions: header/5",
|
||||
"client -> CONTENT Profile header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Profile sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -645,12 +575,11 @@ test("should sync and load accounts from storage", async () => {
|
||||
crypto: Crypto,
|
||||
accountSecret: agentSecret,
|
||||
accountID,
|
||||
peersToLoadFrom: [await IDBStorage.asPeer()],
|
||||
peersToLoadFrom: [],
|
||||
storage: await getIndexedDBStorage(),
|
||||
sessionID: Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
});
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
@@ -664,12 +593,9 @@ test("should sync and load accounts from storage", async () => {
|
||||
[
|
||||
"client -> LOAD Account sessions: empty",
|
||||
"storage -> CONTENT Account header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN Account sessions: header/4",
|
||||
"client -> LOAD Profile sessions: empty",
|
||||
"storage -> CONTENT ProfileGroup header: true new: After: 0 New: 5",
|
||||
"client -> KNOWN ProfileGroup sessions: header/5",
|
||||
"storage -> CONTENT Profile header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Profile sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
|
||||
@@ -1,39 +1,63 @@
|
||||
import type { LocalNode, SyncMessage } from "cojson";
|
||||
import { cojsonInternals } from "cojson";
|
||||
import { StorageManagerAsync } from "cojson-storage";
|
||||
import type { RawCoID, SyncMessage } from "cojson";
|
||||
import { StorageApiAsync } from "cojson";
|
||||
import { onTestFinished } from "vitest";
|
||||
|
||||
const { SyncManager } = cojsonInternals;
|
||||
|
||||
export function trackMessages() {
|
||||
const messages: {
|
||||
from: "client" | "server" | "storage";
|
||||
msg: SyncMessage;
|
||||
}[] = [];
|
||||
|
||||
const originalHandleSyncMessage =
|
||||
StorageManagerAsync.prototype.handleSyncMessage;
|
||||
const originalNodeSyncMessage = SyncManager.prototype.handleSyncMessage;
|
||||
const originalLoad = StorageApiAsync.prototype.load;
|
||||
const originalStore = StorageApiAsync.prototype.store;
|
||||
|
||||
StorageManagerAsync.prototype.handleSyncMessage = async function (msg) {
|
||||
StorageApiAsync.prototype.load = async function (id, callback, done) {
|
||||
messages.push({
|
||||
from: "client",
|
||||
msg,
|
||||
msg: {
|
||||
action: "load",
|
||||
id: id as RawCoID,
|
||||
header: false,
|
||||
sessions: {},
|
||||
},
|
||||
});
|
||||
return originalHandleSyncMessage.call(this, msg);
|
||||
return originalLoad.call(
|
||||
this,
|
||||
id,
|
||||
(msg) => {
|
||||
messages.push({
|
||||
from: "storage",
|
||||
msg,
|
||||
});
|
||||
callback(msg);
|
||||
},
|
||||
done,
|
||||
);
|
||||
};
|
||||
|
||||
SyncManager.prototype.handleSyncMessage = async function (msg, peer) {
|
||||
messages.push({
|
||||
from: "storage",
|
||||
msg,
|
||||
StorageApiAsync.prototype.store = async function (data, correctionCallback) {
|
||||
for (const msg of data) {
|
||||
messages.push({
|
||||
from: "client",
|
||||
msg,
|
||||
});
|
||||
}
|
||||
return originalStore.call(this, data, (msg) => {
|
||||
messages.push({
|
||||
from: "storage",
|
||||
msg: {
|
||||
action: "known",
|
||||
isCorrection: true,
|
||||
...msg,
|
||||
},
|
||||
});
|
||||
correctionCallback(msg);
|
||||
});
|
||||
return originalNodeSyncMessage.call(this, msg, peer);
|
||||
};
|
||||
|
||||
const restore = () => {
|
||||
StorageManagerAsync.prototype.handleSyncMessage = originalHandleSyncMessage;
|
||||
SyncManager.prototype.handleSyncMessage = originalNodeSyncMessage;
|
||||
StorageApiAsync.prototype.load = originalLoad;
|
||||
StorageApiAsync.prototype.store = originalStore;
|
||||
messages.length = 0;
|
||||
};
|
||||
|
||||
|
||||
@@ -1,5 +1,33 @@
|
||||
# cojson-storage-sqlite
|
||||
|
||||
## 0.15.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.10
|
||||
|
||||
## 0.15.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [27b4837]
|
||||
- Updated dependencies [2776263]
|
||||
- cojson@0.15.9
|
||||
|
||||
## 0.15.8
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.8
|
||||
- cojson-storage@0.15.8
|
||||
|
||||
## 0.15.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.7
|
||||
- cojson-storage@0.15.7
|
||||
|
||||
## 0.15.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
{
|
||||
"name": "cojson-storage-sqlite",
|
||||
"type": "module",
|
||||
"version": "0.15.6",
|
||||
"version": "0.15.10",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^11.7.0",
|
||||
"cojson": "workspace:0.15.6",
|
||||
"cojson-storage": "workspace:*"
|
||||
"cojson": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/better-sqlite3": "^7.6.12",
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
import Database, { type Database as DatabaseT } from "better-sqlite3";
|
||||
import type { SQLiteDatabaseDriver } from "cojson-storage";
|
||||
|
||||
export class BetterSqliteDriver implements SQLiteDatabaseDriver {
|
||||
private readonly db: DatabaseT;
|
||||
|
||||
constructor(filename: string) {
|
||||
const db = new Database(filename);
|
||||
this.db = db;
|
||||
db.pragma("journal_mode = WAL");
|
||||
}
|
||||
|
||||
run(sql: string, params: unknown[]) {
|
||||
this.db.prepare(sql).run(params);
|
||||
}
|
||||
|
||||
query<T>(sql: string, params: unknown[]): T[] {
|
||||
return this.db.prepare(sql).all(params) as T[];
|
||||
}
|
||||
|
||||
get<T>(sql: string, params: unknown[]): T | undefined {
|
||||
return this.db.prepare(sql).get(params) as T | undefined;
|
||||
}
|
||||
|
||||
transaction(callback: () => unknown) {
|
||||
return this.db.transaction(callback)();
|
||||
}
|
||||
|
||||
closeDb() {
|
||||
this.db.close();
|
||||
}
|
||||
}
|
||||
@@ -1 +1,39 @@
export { SQLiteNode, SQLiteNode as SQLiteStorage } from "./sqliteNode.js";
import Database, { type Database as DatabaseT } from "better-sqlite3";
import type { SQLiteDatabaseDriver } from "cojson";
import { getSqliteStorage } from "cojson";

export class BetterSqliteDriver implements SQLiteDatabaseDriver {
  private readonly db: DatabaseT;

  constructor(filename: string) {
    const db = new Database(filename);
    this.db = db;
    db.pragma("journal_mode = WAL");
  }

  run(sql: string, params: unknown[]) {
    this.db.prepare(sql).run(params);
  }

  query<T>(sql: string, params: unknown[]): T[] {
    return this.db.prepare(sql).all(params) as T[];
  }

  get<T>(sql: string, params: unknown[]): T | undefined {
    return this.db.prepare(sql).get(params) as T | undefined;
  }

  transaction(callback: () => unknown) {
    return this.db.transaction(callback)();
  }

  closeDb() {
    this.db.close();
  }
}

export function getBetterSqliteStorage(filename: string) {
  const db = new BetterSqliteDriver(filename);

  return getSqliteStorage(db);
}

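For context, a minimal usage sketch of the new entry point; the import from the published package name `cojson-storage-sqlite` and the `./data.db` path are assumptions, while the `LocalNode` and crypto calls mirror the tests further down in this diff:

```ts
import { LocalNode } from "cojson";
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
import { getBetterSqliteStorage } from "cojson-storage-sqlite";

const Crypto = await WasmCrypto.create();
const agentSecret = Crypto.newRandomAgentSecret();

const node = new LocalNode(
  agentSecret,
  Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
  Crypto,
);

// Storage is now attached directly instead of being added as a peer
// via the removed SQLiteNode.asPeer(...) API.
node.setStorage(getBetterSqliteStorage("./data.db"));
```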
@@ -1,21 +0,0 @@
|
||||
import type { Peer } from "cojson";
|
||||
import { SQLiteNodeBase } from "cojson-storage";
|
||||
import { BetterSqliteDriver } from "./betterSqliteDriver.js";
|
||||
|
||||
export class SQLiteNode extends SQLiteNodeBase {
|
||||
static async asPeer({
|
||||
filename,
|
||||
localNodeName = "local",
|
||||
}: {
|
||||
filename: string;
|
||||
localNodeName?: string;
|
||||
}): Promise<Peer> {
|
||||
const db = new BetterSqliteDriver(filename);
|
||||
|
||||
return SQLiteNodeBase.create({
|
||||
db,
|
||||
localNodeName,
|
||||
maxBlockingTime: 500,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -2,18 +2,16 @@ import { randomUUID } from "node:crypto";
|
||||
import { unlinkSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { LocalNode, cojsonInternals } from "cojson";
|
||||
import { SQLiteNodeBase, StorageManagerSync } from "cojson-storage";
|
||||
import { LocalNode, StorageApiSync, cojsonInternals } from "cojson";
|
||||
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
|
||||
import { expect, onTestFinished, test, vi } from "vitest";
|
||||
import { BetterSqliteDriver } from "../betterSqliteDriver.js";
|
||||
import { SQLiteNode } from "../index.js";
|
||||
import { getBetterSqliteStorage } from "../index.js";
|
||||
import { toSimplifiedMessages } from "./messagesTestUtils.js";
|
||||
import { trackMessages, waitFor } from "./testUtils.js";
|
||||
|
||||
const Crypto = await WasmCrypto.create();
|
||||
|
||||
async function createSQLiteStorage(defaultDbPath?: string) {
|
||||
function createSQLiteStorage(defaultDbPath?: string) {
|
||||
const dbPath = defaultDbPath ?? join(tmpdir(), `test-${randomUUID()}.db`);
|
||||
|
||||
if (!defaultDbPath) {
|
||||
@@ -23,29 +21,11 @@ async function createSQLiteStorage(defaultDbPath?: string) {
|
||||
}
|
||||
|
||||
return {
|
||||
peer: await SQLiteNode.asPeer({
|
||||
filename: dbPath,
|
||||
}),
|
||||
storage: getBetterSqliteStorage(dbPath),
|
||||
dbPath,
|
||||
};
|
||||
}
|
||||
|
||||
test("Should be able to initialize and load from empty DB", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node.syncManager.addPeer((await createSQLiteStorage()).peer);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(node.syncManager.peers.storage).toBeDefined();
|
||||
});
|
||||
|
||||
test("should sync and load data from storage", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
@@ -55,11 +35,11 @@ test("should sync and load data from storage", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
const node1Sync = trackMessages();
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
@@ -80,9 +60,7 @@ test("should sync and load data from storage", async () => {
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -94,11 +72,9 @@ test("should sync and load data from storage", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
const node2Sync = trackMessages();
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
@@ -119,9 +95,7 @@ test("should sync and load data from storage", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -137,11 +111,11 @@ test("should send an empty content message if there is no content", async () =>
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
const node1Sync = trackMessages();
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
@@ -160,9 +134,7 @@ test("should send an empty content message if there is no content", async () =>
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: ",
|
||||
"storage -> KNOWN Map sessions: header/0",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -174,11 +146,9 @@ test("should send an empty content message if there is no content", async () =>
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
const node2Sync = trackMessages();
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
@@ -197,9 +167,7 @@ test("should send an empty content message if there is no content", async () =>
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: ",
|
||||
"client -> KNOWN Map sessions: header/0",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -215,11 +183,11 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
const node1Sync = trackMessages();
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
const parentGroup = node1.createGroup();
|
||||
@@ -243,12 +211,9 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"storage -> KNOWN ParentGroup sessions: header/4",
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"storage -> KNOWN Group sessions: header/5",
|
||||
"client -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -260,11 +225,9 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
const node2Sync = trackMessages();
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
await node2.load(map.id);
|
||||
|
||||
@@ -285,11 +248,8 @@ test("should load dependencies correctly (group inheritance)", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN ParentGroup sessions: header/4",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"client -> KNOWN Group sessions: header/5",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -303,11 +263,11 @@ test("should not send the same dependency value twice", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
const node1Sync = trackMessages();
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
const parentGroup = node1.createGroup();
|
||||
@@ -330,11 +290,9 @@ test("should not send the same dependency value twice", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
const node2Sync = trackMessages();
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
await node2.load(map.id);
|
||||
await node2.load(mapFromParent.id);
|
||||
@@ -358,14 +316,10 @@ test("should not send the same dependency value twice", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN ParentGroup sessions: header/4",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"client -> KNOWN Group sessions: header/5",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
"client -> LOAD MapFromParent sessions: empty",
|
||||
"storage -> CONTENT MapFromParent header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN MapFromParent sessions: header/1",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -379,11 +333,11 @@ test("should recover from data loss", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
const node1Sync = trackMessages();
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
@@ -394,8 +348,8 @@ test("should recover from data loss", async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
const mock = vi
|
||||
.spyOn(StorageManagerSync.prototype, "handleSyncMessage")
|
||||
.mockImplementation(() => Promise.resolve());
|
||||
.spyOn(StorageApiSync.prototype, "store")
|
||||
.mockImplementation(() => false);
|
||||
|
||||
map.set("1", 1);
|
||||
map.set("2", 2);
|
||||
@@ -419,13 +373,8 @@ test("should recover from data loss", async () => {
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
"client -> CONTENT Map header: false new: After: 3 New: 1",
|
||||
"storage -> KNOWN CORRECTION Map sessions: header/1",
|
||||
"client -> CONTENT Map header: false new: After: 1 New: 3",
|
||||
"storage -> KNOWN Map sessions: header/4",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -437,11 +386,9 @@ test("should recover from data loss", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
const node2Sync = trackMessages();
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
|
||||
@@ -468,9 +415,7 @@ test("should recover from data loss", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN Map sessions: header/4",
|
||||
]
|
||||
`);
|
||||
});
|
||||
@@ -501,24 +446,28 @@ test("should recover missing dependencies from storage", async () => {
|
||||
node1.syncManager.addPeer(serverPeer);
|
||||
serverNode.syncManager.addPeer(clientPeer);
|
||||
|
||||
const handleSyncMessage = StorageManagerSync.prototype.handleSyncMessage;
|
||||
const store = StorageApiSync.prototype.store;
|
||||
|
||||
const mock = vi
|
||||
.spyOn(StorageManagerSync.prototype, "handleSyncMessage")
|
||||
.mockImplementation(function (this: StorageManagerSync, msg) {
|
||||
.spyOn(StorageApiSync.prototype, "store")
|
||||
.mockImplementation(function (
|
||||
this: StorageApiSync,
|
||||
data,
|
||||
correctionCallback,
|
||||
) {
|
||||
if (
|
||||
msg.action === "content" &&
|
||||
[group.core.id, account.core.id].includes(msg.id)
|
||||
data[0]?.id &&
|
||||
[group.core.id, account.core.id as string].includes(data[0].id)
|
||||
) {
|
||||
return Promise.resolve();
|
||||
return false;
|
||||
}
|
||||
|
||||
return handleSyncMessage.call(this, msg);
|
||||
return store.call(this, data, correctionCallback);
|
||||
});
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
group.addMember("everyone", "writer");
|
||||
@@ -549,9 +498,7 @@ test("should recover missing dependencies from storage", async () => {
|
||||
node2.syncManager.addPeer(serverPeer2);
|
||||
serverNode.syncManager.addPeer(clientPeer2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
|
||||
@@ -573,9 +520,9 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
@@ -593,7 +540,7 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node2.syncManager.addPeer((await createSQLiteStorage(dbPath)).peer);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
@@ -614,9 +561,9 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node3Sync = trackMessages(node3);
|
||||
const node3Sync = trackMessages();
|
||||
|
||||
node3.syncManager.addPeer((await createSQLiteStorage(dbPath)).peer);
|
||||
node3.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
const map3 = await node3.load(map.id);
|
||||
if (map3 === "unavailable") {
|
||||
@@ -637,9 +584,7 @@ test("should sync multiple sessions in a single content message", async () => {
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1 | After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/2",
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -655,9 +600,9 @@ test("large coValue upload streaming", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
const { storage, dbPath } = createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
node1.setStorage(storage);
|
||||
|
||||
const group = node1.createGroup();
|
||||
const largeMap = group.createMap();
|
||||
@@ -683,11 +628,9 @@ test("large coValue upload streaming", async () => {
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
const node2Sync = trackMessages();
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
node2.setStorage(createSQLiteStorage(dbPath).storage);
|
||||
|
||||
const largeMapOnNode2 = await node2.load(largeMap.id);
|
||||
|
||||
@@ -714,51 +657,10 @@ test("large coValue upload streaming", async () => {
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> KNOWN Map sessions: header/200",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 97",
|
||||
"client -> KNOWN Map sessions: header/97",
|
||||
"storage -> CONTENT Map header: true new: After: 97 New: 97",
|
||||
"client -> KNOWN Map sessions: header/194",
|
||||
"storage -> CONTENT Map header: true new: After: 194 New: 6",
|
||||
"client -> KNOWN Map sessions: header/200",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
test("should close the db when the node is closed", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const dbPath = join(tmpdir(), `test-${randomUUID()}.db`);
|
||||
|
||||
const db = new BetterSqliteDriver(dbPath);
|
||||
|
||||
const peer = SQLiteNodeBase.create({
|
||||
db,
|
||||
localNodeName: "test",
|
||||
maxBlockingTime: 500,
|
||||
});
|
||||
|
||||
const spy = vi.spyOn(db, "closeDb");
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
|
||||
node1.gracefulShutdown();
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
expect(spy).toHaveBeenCalled();
|
||||
|
||||
unlinkSync(dbPath);
|
||||
});
|
||||
|
||||
@@ -1,36 +1,64 @@
|
||||
import type { LocalNode, SyncMessage } from "cojson";
|
||||
import { StorageManagerSync } from "cojson-storage";
|
||||
import type { LocalNode, RawCoID, SyncMessage } from "cojson";
|
||||
import { StorageApiSync } from "cojson";
|
||||
import { onTestFinished } from "vitest";
|
||||
|
||||
export function trackMessages(node: LocalNode) {
|
||||
export function trackMessages() {
|
||||
const messages: {
|
||||
from: "client" | "server" | "storage";
|
||||
msg: SyncMessage;
|
||||
}[] = [];
|
||||
|
||||
const originalHandleSyncMessage =
|
||||
StorageManagerSync.prototype.handleSyncMessage;
|
||||
const originalNodeSyncMessage = node.syncManager.handleSyncMessage;
|
||||
const originalLoad = StorageApiSync.prototype.load;
|
||||
const originalStore = StorageApiSync.prototype.store;
|
||||
|
||||
StorageManagerSync.prototype.handleSyncMessage = async function (msg) {
|
||||
StorageApiSync.prototype.load = async function (id, callback, done) {
|
||||
messages.push({
|
||||
from: "client",
|
||||
msg,
|
||||
msg: {
|
||||
action: "load",
|
||||
id: id as RawCoID,
|
||||
header: false,
|
||||
sessions: {},
|
||||
},
|
||||
});
|
||||
return originalHandleSyncMessage.call(this, msg);
|
||||
return originalLoad.call(
|
||||
this,
|
||||
id,
|
||||
(msg) => {
|
||||
messages.push({
|
||||
from: "storage",
|
||||
msg,
|
||||
});
|
||||
callback(msg);
|
||||
},
|
||||
done,
|
||||
);
|
||||
};
|
||||
|
||||
node.syncManager.handleSyncMessage = async function (msg, peer) {
|
||||
messages.push({
|
||||
from: "storage",
|
||||
msg,
|
||||
StorageApiSync.prototype.store = function (data, correctionCallback) {
|
||||
for (const msg of data) {
|
||||
messages.push({
|
||||
from: "client",
|
||||
msg,
|
||||
});
|
||||
}
|
||||
return originalStore.call(this, data, (msg) => {
|
||||
messages.push({
|
||||
from: "storage",
|
||||
msg: {
|
||||
action: "known",
|
||||
isCorrection: true,
|
||||
...msg,
|
||||
},
|
||||
});
|
||||
correctionCallback(msg);
|
||||
});
|
||||
return originalNodeSyncMessage.call(this, msg, peer);
|
||||
};
|
||||
|
||||
const restore = () => {
|
||||
StorageManagerSync.prototype.handleSyncMessage = originalHandleSyncMessage;
|
||||
node.syncManager.handleSyncMessage = originalNodeSyncMessage;
|
||||
StorageApiSync.prototype.load = originalLoad;
|
||||
StorageApiSync.prototype.store = originalStore;
|
||||
messages.length = 0;
|
||||
};
|
||||
|
||||
onTestFinished(() => {
|
||||
|
||||
171
packages/cojson-storage/.gitignore
vendored
@@ -1,171 +0,0 @@
|
||||
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
|
||||
|
||||
# Logs
|
||||
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
|
||||
.cache/
|
||||
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
|
||||
.temp
|
||||
.cache
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.*
|
||||
|
||||
.DS_Store
|
||||
@@ -1,2 +0,0 @@
|
||||
coverage
|
||||
node_modules
|
||||
@@ -1,621 +0,0 @@
|
||||
# cojson-storage
|
||||
|
||||
## 0.15.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.6
|
||||
|
||||
## 0.15.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.5
|
||||
|
||||
## 0.15.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [277e4d4]
|
||||
- cojson@0.15.4
|
||||
|
||||
## 0.15.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.3
|
||||
|
||||
## 0.15.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4b964ed]
|
||||
- cojson@0.15.2
|
||||
|
||||
## 0.15.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [b110f00]
|
||||
- cojson@0.15.1
|
||||
|
||||
## 0.15.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.15.0
|
||||
|
||||
## 0.14.28
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.14.28
|
||||
|
||||
## 0.14.27
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.14.27
|
||||
|
||||
## 0.14.26
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 680a2e2: Read in parallel up to 10 values on the async storage adapters to improve loading perf
|
||||
- Updated dependencies [e74a077]
|
||||
- cojson@0.14.26
|
||||
|
||||
## 0.14.25
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.14.25
|
||||
|
||||
## 0.14.24
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.14.24
|
||||
|
||||
## 0.14.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 5f42c97: Close the DB connection when the node/context is closed
|
||||
- Updated dependencies [1ca9299]
|
||||
- cojson@0.14.23
|
||||
|
||||
## 0.14.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [57fb69f]
|
||||
- cojson@0.14.22
|
||||
|
||||
## 0.14.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [c3d8779]
|
||||
- cojson@0.14.21
|
||||
|
||||
## 0.14.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.14.20
|
||||
|
||||
## 0.14.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- cojson@0.14.19
|
||||
|
||||
## 0.14.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- be7c4c2: Incorporate SQLite sync/async adapters and make them more aligned
|
||||
- Updated dependencies [0d5ee3e]
|
||||
- cojson@0.14.18
|
||||
|
||||
## 0.14.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [5e253cc]
|
||||
- cojson@0.14.16
|
||||
|
||||
## 0.14.15
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 23daa7c: Align the processing of the group dependencies between LocalNode and Storage.
|
||||
- Updated dependencies [23daa7c]
|
||||
- cojson@0.14.15
|
||||
|
||||
## 0.14.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [c8b33ad]
|
||||
- cojson@0.14.1
|
||||
|
||||
## 0.14.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [5835ed1]
|
||||
- cojson@0.14.0
|
||||
|
||||
## 0.13.32
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 2bf9743: Implement content streaming for large CoValues on storage
|
||||
|
||||
## 0.13.31
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [d63716a]
|
||||
- Updated dependencies [d5edad7]
|
||||
- cojson@0.13.31
|
||||
|
||||
## 0.13.30
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [07dd2c5]
|
||||
- cojson@0.13.30
|
||||
|
||||
## 0.13.29
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- e2d6ba3: Create specialized Sync and Async storage managers
|
||||
- Updated dependencies [eef1a5d]
|
||||
- Updated dependencies [191ae38]
|
||||
- Updated dependencies [daee7b9]
|
||||
- cojson@0.13.29
|
||||
|
||||
## 0.13.28
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [e7ccb2c]
|
||||
- cojson@0.13.28
|
||||
|
||||
## 0.13.27
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6357052]
|
||||
- cojson@0.13.27
|
||||
|
||||
## 0.13.25
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [a846e07]
|
||||
- cojson@0.13.25
|
||||
|
||||
## 0.13.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6b781cf]
|
||||
- cojson@0.13.23
|
||||
|
||||
## 0.13.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [e14e61f]
|
||||
- cojson@0.13.21
|
||||
|
||||
## 0.13.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- adfc9a6: Make waitForSync work on storage peers by handling optimistic/known states
|
||||
- Updated dependencies [adfc9a6]
|
||||
- Updated dependencies [1389207]
|
||||
- Updated dependencies [d6e143e]
|
||||
- Updated dependencies [3e6229d]
|
||||
- cojson@0.13.20
|
||||
|
||||
## 0.13.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 8b2df0e: Optimized the dependency push from storage to send a given dependency only once
|
||||
- Updated dependencies [9089252]
|
||||
- Updated dependencies [b470f63]
|
||||
- Updated dependencies [66373ba]
|
||||
- Updated dependencies [f24cad1]
|
||||
- cojson@0.13.18
|
||||
|
||||
## 0.13.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [9fb98e2]
|
||||
- Updated dependencies [0b89fad]
|
||||
- cojson@0.13.17
|
||||
|
||||
## 0.13.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [c6fb8dc]
|
||||
- cojson@0.13.16
|
||||
|
||||
## 0.13.15
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [c712ef2]
|
||||
- cojson@0.13.15
|
||||
|
||||
## 0.13.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [5c2c7d4]
|
||||
- cojson@0.13.14
|
||||
|
||||
## 0.13.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [ec9cb40]
|
||||
- cojson@0.13.13
|
||||
|
||||
## 0.13.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [65719f2]
|
||||
- cojson@0.13.12
|
||||
|
||||
## 0.13.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [17273a6]
|
||||
- Updated dependencies [3396ed4]
|
||||
- Updated dependencies [267ea4c]
|
||||
- cojson@0.13.11
|
||||
|
||||
## 0.13.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [f837cfe]
|
||||
- cojson@0.13.10
|
||||
|
||||
## 0.13.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [bc3d7bb]
|
||||
- Updated dependencies [4e9aae1]
|
||||
- Updated dependencies [21c935c]
|
||||
- Updated dependencies [aa1c80e]
|
||||
- Updated dependencies [13074be]
|
||||
- cojson@0.13.7
|
||||
|
||||
## 0.13.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [e090b39]
|
||||
- cojson@0.13.5
|
||||
|
||||
## 0.13.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [c551839]
|
||||
- cojson@0.13.2
|
||||
|
||||
## 0.13.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [a013538]
|
||||
- Updated dependencies [bce3bcc]
|
||||
- cojson@0.13.0
|
||||
|
||||
## 0.12.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [c2f4827]
|
||||
- cojson@0.12.2
|
||||
|
||||
## 0.12.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [5a00fe0]
|
||||
- cojson@0.12.1
|
||||
|
||||
## 0.12.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [01523dc]
|
||||
- Updated dependencies [01523dc]
|
||||
- cojson@0.12.0
|
||||
|
||||
## 0.11.8
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6c86c4f]
|
||||
- Updated dependencies [9d0c9dc]
|
||||
- cojson@0.11.8
|
||||
|
||||
## 0.11.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [2b94bc8]
|
||||
- Updated dependencies [2957362]
|
||||
- cojson@0.11.7
|
||||
|
||||
## 0.11.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [8ed144e]
|
||||
- cojson@0.11.6
|
||||
|
||||
## 0.11.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [60f5b3f]
|
||||
- cojson@0.11.5
|
||||
|
||||
## 0.11.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [7f036c1]
|
||||
- cojson@0.11.4
|
||||
|
||||
## 0.11.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 68b0242: Improve the error logging to have more information on errors leveraging the pino err serializer
|
||||
- Updated dependencies [68b0242]
|
||||
- cojson@0.11.3
|
||||
|
||||
## 0.11.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- a4713df: Moving to the d.ts files for the exported type definitions
|
||||
- Updated dependencies [b9d194a]
|
||||
- Updated dependencies [a4713df]
|
||||
- Updated dependencies [e22de9f]
|
||||
- Updated dependencies [34cbdc3]
|
||||
- Updated dependencies [0f67e0a]
|
||||
- cojson@0.11.0
|
||||
|
||||
## 0.10.15
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [f86e278]
|
||||
- cojson@0.10.15
|
||||
|
||||
## 0.10.8
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [153dc99]
|
||||
- cojson@0.10.8
|
||||
|
||||
## 0.10.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 1e625f3: Improve rollback on error when failing to add new content
|
||||
- Updated dependencies [0f83320]
|
||||
- Updated dependencies [012022d]
|
||||
- cojson@0.10.7
|
||||
|
||||
## 0.10.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [5c76e37]
|
||||
- cojson@0.10.6
|
||||
|
||||
## 0.10.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [1af6072]
|
||||
- cojson@0.10.4
|
||||
|
||||
## 0.10.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [cae3a9e]
|
||||
- cojson@0.10.2
|
||||
|
||||
## 0.10.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [5a63cba]
|
||||
- cojson@0.10.1
|
||||
|
||||
## 0.10.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [b426342]
|
||||
- Updated dependencies [498954f]
|
||||
- Updated dependencies [8217981]
|
||||
- Updated dependencies [ac3d9fa]
|
||||
- Updated dependencies [610543c]
|
||||
- cojson@0.10.0
|
||||
|
||||
## 0.9.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [70c9a5d]
|
||||
- cojson@0.9.23
|
||||
|
||||
## 0.9.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6ad0a9f]
|
||||
- cojson@0.9.19
|
||||
|
||||
## 0.9.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [8898b10]
|
||||
- cojson@0.9.18
|
||||
|
||||
## 0.9.13
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 8d29e50: Restore the logger wrapper and adapt the API to pino
|
||||
- Updated dependencies [8d29e50]
|
||||
- cojson@0.9.13
|
||||
|
||||
## 0.9.12
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 15d4b2a: Revert the custom logger
|
||||
- Updated dependencies [15d4b2a]
|
||||
- cojson@0.9.12
|
||||
|
||||
## 0.9.11
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 5863bad: Wrap all the console logs with a logger class to make it possible to customize the logger
|
||||
- Updated dependencies [efbf3d8]
|
||||
- Updated dependencies [5863bad]
|
||||
- cojson@0.9.11
|
||||
|
||||
## 0.9.10
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [4aa377d]
|
||||
- cojson@0.9.10
|
||||
|
||||
## 0.9.9
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [8eb9247]
|
||||
- cojson@0.9.9
|
||||
|
||||
## 0.9.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [8eda792]
|
||||
- Updated dependencies [1ef3226]
|
||||
- cojson@0.9.0
|
||||
|
||||
## 0.8.50
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [43378ef]
|
||||
- cojson@0.8.50
|
||||
|
||||
## 0.8.49
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [25dfd90]
|
||||
- cojson@0.8.49
|
||||
|
||||
## 0.8.48
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [10ea733]
|
||||
- cojson@0.8.48
|
||||
|
||||
## 0.8.45
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6f0bd7f]
|
||||
- Updated dependencies [fca6a0b]
|
||||
- Updated dependencies [88d7d9a]
|
||||
- cojson@0.8.45
|
||||
|
||||
## 0.8.44
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [5d20c81]
|
||||
- cojson@0.8.44
|
||||
|
||||
## 0.8.41
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [3252502]
|
||||
- Updated dependencies [6370348]
|
||||
- Updated dependencies [ac216b9]
|
||||
- cojson@0.8.41
|
||||
|
||||
## 0.8.40
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- e905c84: Stop the use of incremental streaming of large CoValue content from local storage peers that triggers a sync protocol bug leading to redundant syncing from server peers.
|
||||
|
||||
## 0.8.39
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [249eecb]
|
||||
- Updated dependencies [3121551]
|
||||
- cojson@0.8.39
|
||||
|
||||
## 0.8.38
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [b00ee91]
|
||||
- Updated dependencies [f488c09]
|
||||
- cojson@0.8.38
|
||||
|
||||
## 0.8.37
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [3d9f12e]
|
||||
- cojson@0.8.37
|
||||
|
||||
## 0.8.36
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- 1afbd2c: Refactor the SQLite and IndexedDB storage packages to extract common synchronization functionality into the newly created cojson-storage package.
|
||||
- Updated dependencies [441fe27]
|
||||
- cojson@0.8.36
|
||||
@@ -1,19 +0,0 @@
|
||||
Copyright 2025, Garden Computing, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -1,3 +0,0 @@
|
||||
# CoJSON Storage IndexedDB
|
||||
|
||||
This implements a persistence sync service for CoJSON / Jazz (see [jazz.tools](https://jazz.tools)).
|
||||
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"name": "cojson-storage",
|
||||
"version": "0.15.6",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cojson": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"libsql": "^0.5.10",
|
||||
"typescript": "catalog:",
|
||||
"vitest": "catalog:"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "tsc --watch --sourceMap --outDir dist",
|
||||
"test": "vitest --run --root ../../ --project cojson-storage",
|
||||
"test:watch": "vitest --watch --root ../../ --project cojson-storage",
|
||||
"format-and-lint": "biome check .",
|
||||
"format-and-lint:fix": "biome check . --write",
|
||||
"build": "rm -rf ./dist && tsc --sourceMap --outDir dist"
|
||||
}
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
export * from "./types.js";
|
||||
export { StorageManagerSync } from "./managerSync.js";
|
||||
export { StorageManagerAsync } from "./managerAsync.js";
|
||||
export * from "./sqlite/index.js";
|
||||
export * from "./sqliteAsync/index.js";
|
||||
@@ -1,2 +0,0 @@
|
||||
export { SQLiteNodeBase } from "./node.js";
|
||||
export type { SQLiteDatabaseDriver } from "./types.js";
|
||||
@@ -1,104 +0,0 @@
|
||||
import {
|
||||
type IncomingSyncStream,
|
||||
type OutgoingSyncQueue,
|
||||
type Peer,
|
||||
cojsonInternals,
|
||||
logger,
|
||||
} from "cojson";
|
||||
import { StorageManagerSync } from "../managerSync.js";
|
||||
import { SQLiteClient } from "./client.js";
|
||||
import { getSQLiteMigrationQueries } from "./sqliteMigrations.js";
|
||||
import type { SQLiteDatabaseDriver } from "./types.js";
|
||||
|
||||
export class SQLiteNodeBase {
|
||||
private readonly syncManager: StorageManagerSync;
|
||||
private readonly dbClient: SQLiteClient;
|
||||
|
||||
constructor(
|
||||
db: SQLiteDatabaseDriver,
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
maxBlockingTime: number,
|
||||
) {
|
||||
this.dbClient = new SQLiteClient(db);
|
||||
this.syncManager = new StorageManagerSync(this.dbClient, toLocalNode);
|
||||
|
||||
const processMessages = async () => {
|
||||
let lastTimer = performance.now();
|
||||
let runningTimer = false;
|
||||
|
||||
for await (const msg of fromLocalNode) {
|
||||
try {
|
||||
if (msg === "Disconnected" || msg === "PingTimeout") {
|
||||
throw new Error("Unexpected Disconnected message");
|
||||
}
|
||||
|
||||
if (!runningTimer) {
|
||||
runningTimer = true;
|
||||
lastTimer = performance.now();
|
||||
setTimeout(() => {
|
||||
runningTimer = false;
|
||||
}, 10);
|
||||
}
|
||||
|
||||
this.syncManager.handleSyncMessage(msg);
|
||||
|
||||
// Since the DB APIs are synchronous there may be the case
|
||||
// where a bulk of messages are processed without interruptions
|
||||
// which may block other peers from sending messages.
|
||||
|
||||
// To avoid this we schedule a timer to downgrade the priority of the storage peer work
|
||||
if (performance.now() - lastTimer > maxBlockingTime) {
|
||||
lastTimer = performance.now();
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error("Error reading from localNode, handling msg", {
|
||||
msg,
|
||||
err: e,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
db.closeDb();
|
||||
};
|
||||
|
||||
processMessages().catch((e) =>
|
||||
logger.error("Error in processMessages in sqlite", { err: e }),
|
||||
);
|
||||
}
|
||||
|
||||
static create({
|
||||
db,
|
||||
localNodeName = "local",
|
||||
maxBlockingTime = 500,
|
||||
}: {
|
||||
db: SQLiteDatabaseDriver;
|
||||
localNodeName?: string;
|
||||
maxBlockingTime?: number;
|
||||
}): Peer {
|
||||
const [localNodeAsPeer, storageAsPeer] = cojsonInternals.connectedPeers(
|
||||
localNodeName,
|
||||
"storage",
|
||||
{ peer1role: "client", peer2role: "storage", crashOnClose: true },
|
||||
);
|
||||
|
||||
const rows = db.query<{ user_version: string }>("PRAGMA user_version", []);
|
||||
const userVersion = Number(rows[0]?.user_version) ?? 0;
|
||||
|
||||
const migrations = getSQLiteMigrationQueries(userVersion);
|
||||
|
||||
for (const migration of migrations) {
|
||||
db.run(migration, []);
|
||||
}
|
||||
|
||||
new SQLiteNodeBase(
|
||||
db,
|
||||
localNodeAsPeer.incoming,
|
||||
localNodeAsPeer.outgoing,
|
||||
maxBlockingTime,
|
||||
);
|
||||
|
||||
return { ...storageAsPeer, priority: 100 };
|
||||
}
|
||||
}
|
||||
@@ -1,2 +0,0 @@
|
||||
export { SQLiteNodeBaseAsync } from "./node.js";
|
||||
export type { SQLiteDatabaseDriverAsync } from "./types.js";
|
||||
@@ -1,115 +0,0 @@
|
||||
import {
|
||||
type IncomingSyncStream,
|
||||
type OutgoingSyncQueue,
|
||||
type Peer,
|
||||
cojsonInternals,
|
||||
logger,
|
||||
} from "cojson";
|
||||
import { StorageManagerAsync } from "../managerAsync.js";
|
||||
import { getSQLiteMigrationQueries } from "../sqlite/sqliteMigrations.js";
|
||||
import { SQLiteClientAsync } from "./client.js";
|
||||
import type { SQLiteDatabaseDriverAsync } from "./types.js";
|
||||
|
||||
function createParallelOpsRunner() {
|
||||
const ops = new Set<Promise<unknown>>();
|
||||
|
||||
return {
|
||||
add: (op: Promise<unknown>) => {
|
||||
ops.add(op);
|
||||
op.finally(() => {
|
||||
ops.delete(op);
|
||||
});
|
||||
},
|
||||
wait() {
|
||||
return Promise.race(ops);
|
||||
},
|
||||
get size() {
|
||||
return ops.size;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export class SQLiteNodeBaseAsync {
|
||||
private readonly syncManager: StorageManagerAsync;
|
||||
private readonly dbClient: SQLiteClientAsync;
|
||||
|
||||
constructor(
|
||||
db: SQLiteDatabaseDriverAsync,
|
||||
fromLocalNode: IncomingSyncStream,
|
||||
toLocalNode: OutgoingSyncQueue,
|
||||
) {
|
||||
this.dbClient = new SQLiteClientAsync(db);
|
||||
this.syncManager = new StorageManagerAsync(this.dbClient, toLocalNode);
|
||||
|
||||
const processMessages = async () => {
|
||||
const batch = createParallelOpsRunner();
|
||||
|
||||
for await (const msg of fromLocalNode) {
|
||||
try {
|
||||
if (msg === "Disconnected" || msg === "PingTimeout") {
|
||||
throw new Error("Unexpected Disconnected message");
|
||||
}
|
||||
|
||||
if (msg.action === "content") {
|
||||
await this.syncManager.handleSyncMessage(msg);
|
||||
} else {
|
||||
batch.add(this.syncManager.handleSyncMessage(msg));
|
||||
}
|
||||
|
||||
if (batch.size > 10) {
|
||||
await batch.wait();
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error("Error reading from localNode, handling msg", {
|
||||
msg,
|
||||
err: e,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
db.closeDb().catch((e) =>
|
||||
logger.error("Error closing sqlite", { err: e }),
|
||||
);
|
||||
};
|
||||
|
||||
processMessages().catch((e) =>
|
||||
logger.error("Error in processMessages in sqlite", { err: e }),
|
||||
);
|
||||
}
|
||||
|
||||
static async create({
|
||||
db,
|
||||
localNodeName = "local",
|
||||
}: {
|
||||
db: SQLiteDatabaseDriverAsync;
|
||||
localNodeName?: string;
|
||||
}): Promise<Peer> {
|
||||
const [localNodeAsPeer, storageAsPeer] = cojsonInternals.connectedPeers(
|
||||
localNodeName,
|
||||
"storage",
|
||||
{ peer1role: "client", peer2role: "storage", crashOnClose: true },
|
||||
);
|
||||
|
||||
await db.initialize();
|
||||
|
||||
const rows = await db.query<{ user_version: string }>(
|
||||
"PRAGMA user_version",
|
||||
[],
|
||||
);
|
||||
const userVersion = Number(rows[0]?.user_version) ?? 0;
|
||||
|
||||
const migrations = getSQLiteMigrationQueries(userVersion);
|
||||
|
||||
for (const migration of migrations) {
|
||||
await db.run(migration, []);
|
||||
}
|
||||
|
||||
new SQLiteNodeBaseAsync(
|
||||
db,
|
||||
localNodeAsPeer.incoming,
|
||||
localNodeAsPeer.outgoing,
|
||||
);
|
||||
|
||||
return { ...storageAsPeer, priority: 100 };
|
||||
}
|
||||
}
|
||||
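For reference, a minimal sketch of the bounded-concurrency pattern that the deleted `createParallelOpsRunner` above implements; the `incoming` and `handleMessage` names here are placeholders, not part of the original code:

```ts
// Keep roughly 10 async operations in flight at once, as the runner above does.
async function processWithLimit(
  incoming: AsyncIterable<unknown>,
  handleMessage: (msg: unknown) => Promise<void>,
) {
  const inFlight = new Set<Promise<unknown>>();

  for await (const msg of incoming) {
    const op = handleMessage(msg);
    inFlight.add(op);
    op.finally(() => inFlight.delete(op));

    if (inFlight.size > 10) {
      // Pause reading input until the first pending operation settles.
      await Promise.race(inFlight);
    }
  }

  await Promise.all(inFlight); // drain the remaining work
}
```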
@@ -1,95 +0,0 @@
|
||||
export const fixtures = {
|
||||
co_zKwG8NyfZ8GXqcjDHY4NS3SbU2m: {
|
||||
getContent: ({ after = 0 }: { after?: number }) => ({
|
||||
action: "content",
|
||||
id: "co_zKwG8NyfZ8GXqcjDHY4NS3SbU2m",
|
||||
header: {
|
||||
type: "comap",
|
||||
ruleset: {
|
||||
type: "group",
|
||||
initialAdmin:
|
||||
"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy",
|
||||
},
|
||||
meta: {
|
||||
type: "account",
|
||||
},
|
||||
createdAt: null,
|
||||
uniqueness: null,
|
||||
},
|
||||
new: {
|
||||
"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy_session_zbcBS6rHy8kA":
|
||||
{
|
||||
after,
|
||||
lastSignature:
|
||||
"signature_z2kcFHUPe1qGFYDY4ayvvFR2unFc4jeYph93nSCSjZYS14vnGN4uAw7pKZx1PEhwnspJcDizMRbLaFC8v13i6S79A",
|
||||
newTransactions: [
|
||||
{
|
||||
privacy: "trusting",
|
||||
madeAt: 1732368535089,
|
||||
changes:
|
||||
'[{"key":"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy","op":"set","value":"admin"}]',
|
||||
},
|
||||
{
|
||||
privacy: "trusting",
|
||||
madeAt: 1732368535096,
|
||||
changes:
|
||||
'[{"key":"key_z2YMuLXEfXG44Z2jGk_for_sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy","op":"set","value":"sealed_UAIpJTby8EovZW6WPtAqdaczA2_r6PEWRBuEtLN93-Dh9xDJFaGUNTXK1Cck61tjvA3GoGn9EyQdNN2fU6tnmWP2M09a83dG41Q=="}]',
|
||||
},
|
||||
{
|
||||
privacy: "trusting",
|
||||
madeAt: 1732368535099,
|
||||
changes:
|
||||
'[{"key":"readKey","op":"set","value":"key_z2YMuLXEfXG44Z2jGk"}]',
|
||||
},
|
||||
],
|
||||
},
|
||||
"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy_session_zXgW54i2cCNA":
|
||||
{
|
||||
after,
|
||||
lastSignature:
|
||||
"signature_z5FsinkJCpqZfozVBkEMSchCQarsAjvMYpWN4d227PZtqCiM7KRBNukND3B25Q73idBLdY2MsghbmYFz5JHXk3d4D",
|
||||
newTransactions: [
|
||||
{
|
||||
privacy: "trusting",
|
||||
madeAt: 1732368535113,
|
||||
changes:
|
||||
'[{"key":"profile","op":"set","value":"co_zMKhQJs5rAeGjta3JX2qEdBS6hS"}]',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
priority: 0,
|
||||
}),
|
||||
known: {
|
||||
action: "known",
|
||||
id: "co_zKwG8NyfZ8GXqcjDHY4NS3SbU2m",
|
||||
header: true,
|
||||
sessions: {
|
||||
"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy_session_zbcBS6rHy8kA": 3,
|
||||
"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy_session_zXgW54i2cCNA": 1,
|
||||
},
|
||||
},
|
||||
sessionRecords: [
|
||||
{
|
||||
bytesSinceLastSignature: 479,
|
||||
coValue: 2,
|
||||
lastIdx: 3,
|
||||
lastSignature:
|
||||
"signature_z2kcFHUPe1qGFYDY4ayvvFR2unFc4jeYph93nSCSjZYS14vnGN4uAw7pKZx1PEhwnspJcDizMRbLaFC8v13i6S79A",
|
||||
rowID: 2,
|
||||
sessionID:
|
||||
"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy_session_zbcBS6rHy8kA",
|
||||
},
|
||||
{
|
||||
bytesSinceLastSignature: 71,
|
||||
coValue: 2,
|
||||
lastIdx: 1,
|
||||
lastSignature:
|
||||
"signature_z5FsinkJCpqZfozVBkEMSchCQarsAjvMYpWN4d227PZtqCiM7KRBNukND3B25Q73idBLdY2MsghbmYFz5JHXk3d4D",
|
||||
rowID: 3,
|
||||
sessionID:
|
||||
"sealer_zRKetKBH6tdGP8poA2rV9JDejXqTyAmpusCT4jRcXa4m/signer_z6bcctDRiWxtgmuqLRR6rVhM54DA3xJ2pWCEs6DVf4PSy_session_zXgW54i2cCNA",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
@@ -1,184 +0,0 @@
|
||||
import type { CojsonInternalTypes, SessionID, Stringified } from "cojson";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { getDependedOnCoValues } from "../syncUtils.js";
|
||||
|
||||
function getMockedSessionID(accountId?: `co_z${string}`) {
|
||||
return `${accountId ?? getMockedCoValueId()}_session_z${Math.random().toString(36).substring(2, 15)}`;
|
||||
}
|
||||
|
||||
function getMockedCoValueId() {
|
||||
return `co_z${Math.random().toString(36).substring(2, 15)}` as const;
|
||||
}
|
||||
|
||||
function generateNewContentMessage(
|
||||
privacy: "trusting" | "private",
|
||||
changes: any[],
|
||||
accountId: `co_z${string}`,
|
||||
) {
|
||||
return {
|
||||
action: "content",
|
||||
id: getMockedCoValueId(),
|
||||
new: {
|
||||
[getMockedSessionID(accountId)]: {
|
||||
after: 0,
|
||||
lastSignature: "signature_z123",
|
||||
newTransactions: [
|
||||
{
|
||||
privacy,
|
||||
madeAt: 0,
|
||||
changes: JSON.stringify(changes) as any,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
priority: 0,
|
||||
} as CojsonInternalTypes.NewContentMessage;
|
||||
}
|
||||
|
||||
describe("getDependedOnCoValues", () => {
|
||||
it("should return dependencies for group ruleset", () => {
|
||||
const coValueRow = {
|
||||
id: "co_test",
|
||||
header: {
|
||||
ruleset: {
|
||||
type: "group",
|
||||
},
|
||||
},
|
||||
} as any;
|
||||
|
||||
const accountId = getMockedCoValueId();
|
||||
const result = getDependedOnCoValues(
|
||||
coValueRow.header,
|
||||
generateNewContentMessage(
|
||||
"trusting",
|
||||
[
|
||||
{ op: "set", key: "co_zabc123", value: "test" },
|
||||
{ op: "set", key: "parent_co_zdef456", value: "test" },
|
||||
{ op: "set", key: "normal_key", value: "test" },
|
||||
],
|
||||
accountId,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result).toEqual(new Set([accountId, "co_zabc123", "co_zdef456"]));
|
||||
});
|
||||
|
||||
it("should not throw on malformed JSON", () => {
|
||||
const coValueRow = {
|
||||
id: "co_test",
|
||||
header: {
|
||||
ruleset: {
|
||||
type: "group",
|
||||
},
|
||||
},
|
||||
} as any;
|
||||
|
||||
const accountId = getMockedCoValueId();
|
||||
const message = generateNewContentMessage(
|
||||
"trusting",
|
||||
[{ op: "set", key: "co_zabc123", value: "test" }],
|
||||
accountId,
|
||||
);
|
||||
|
||||
message.new["invalid_session" as SessionID] = {
|
||||
after: 0,
|
||||
lastSignature: "signature_z123",
|
||||
newTransactions: [
|
||||
{
|
||||
privacy: "trusting",
|
||||
madeAt: 0,
|
||||
changes: "}{-:)" as Stringified<CojsonInternalTypes.JsonObject[]>,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = getDependedOnCoValues(coValueRow.header, message);
|
||||
|
||||
expect(result).toEqual(new Set([accountId, "co_zabc123"]));
|
||||
});
|
||||
|
||||
it("should return dependencies for ownedByGroup ruleset", () => {
|
||||
const groupId = getMockedCoValueId();
|
||||
const coValueRow = {
|
||||
id: "co_owner",
|
||||
header: {
|
||||
ruleset: {
|
||||
type: "ownedByGroup",
|
||||
group: groupId,
|
||||
},
|
||||
},
|
||||
} as any;
|
||||
|
||||
const accountId = getMockedCoValueId();
|
||||
const message = generateNewContentMessage(
|
||||
"trusting",
|
||||
[
|
||||
{ op: "set", key: "co_zabc123", value: "test" },
|
||||
{ op: "set", key: "parent_co_zdef456", value: "test" },
|
||||
{ op: "set", key: "normal_key", value: "test" },
|
||||
],
|
||||
accountId,
|
||||
);
|
||||
|
||||
message.new["invalid_session" as SessionID] = {
|
||||
after: 0,
|
||||
lastSignature: "signature_z123",
|
||||
newTransactions: [],
|
||||
};
|
||||
|
||||
const result = getDependedOnCoValues(coValueRow.header, message);
|
||||
|
||||
expect(result).toEqual(new Set([groupId, accountId]));
|
||||
});
|
||||
|
||||
it("should return empty array for other ruleset types", () => {
|
||||
const coValueRow = {
|
||||
id: "co_test",
|
||||
header: {
|
||||
ruleset: {
|
||||
type: "other",
|
||||
},
|
||||
},
|
||||
} as any;
|
||||
|
||||
const accountId = getMockedCoValueId();
|
||||
const result = getDependedOnCoValues(
|
||||
coValueRow.header,
|
||||
generateNewContentMessage(
|
||||
"trusting",
|
||||
[
|
||||
{ op: "set", key: "co_zabc123", value: "test" },
|
||||
{ op: "set", key: "parent_co_zdef456", value: "test" },
|
||||
{ op: "set", key: "normal_key", value: "test" },
|
||||
],
|
||||
accountId,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result).toEqual(new Set([accountId]));
|
||||
});
|
||||
|
||||
it("should ignore non-trusting transactions in group ruleset", () => {
|
||||
const coValueRow = {
|
||||
id: "co_test",
|
||||
header: {
|
||||
ruleset: {
|
||||
type: "group",
|
||||
},
|
||||
},
|
||||
} as any;
|
||||
|
||||
const accountId = getMockedCoValueId();
|
||||
|
||||
const result = getDependedOnCoValues(
|
||||
coValueRow.header,
|
||||
generateNewContentMessage(
|
||||
"private",
|
||||
[{ op: "set", key: "co_zabc123", value: "test" }],
|
||||
accountId,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result).toEqual(new Set([accountId]));
|
||||
});
|
||||
});
|
||||
@@ -1,72 +0,0 @@
import type { CoValueCore, CojsonInternalTypes, SyncMessage } from "cojson";

function simplifySessions(msg: CojsonInternalTypes.CoValueKnownState) {
const count = Object.values(msg.sessions).reduce(
(acc: number, session: number) => acc + session,
0,
);

if (msg.header) {
return `header/${count}`;
}

return "empty";
}

function simplifyNewContent(
content: CojsonInternalTypes.NewContentMessage["new"],
) {
if (!content) {
return undefined;
}

return Object.values(content)
.map((c) => `After: ${c.after} New: ${c.newTransactions.length}`)
.join(" | ");
}

export function toSimplifiedMessages(
coValues: Record<string, CoValueCore>,
messages: {
from: "client" | "server" | "storage";
msg: SyncMessage;
}[],
) {
function getCoValue(id: string) {
for (const [name, coValue] of Object.entries(coValues)) {
if (coValue.id === id) {
return name;
}
}

return `unknown/${id}`;
}

function toDebugString(
from: "client" | "server" | "storage",
msg: SyncMessage,
) {
switch (msg.action) {
case "known":
return `${from} -> KNOWN ${msg.isCorrection ? "CORRECTION " : ""}${getCoValue(msg.id)} sessions: ${simplifySessions(msg)}`;
case "load":
return `${from} -> LOAD ${getCoValue(msg.id)} sessions: ${simplifySessions(msg)}`;
case "done":
return `${from} -> DONE ${getCoValue(msg.id)}`;
case "content":
return `${from} -> CONTENT ${getCoValue(msg.id)} header: ${Boolean(msg.header)} new: ${simplifyNewContent(msg.new)}`;
}
}

return messages.map((m) => toDebugString(m.from, m.msg));
}

export function debugMessages(
coValues: Record<string, CoValueCore>,
messages: {
from: "client" | "server" | "storage";
msg: SyncMessage;
}[],
) {
console.log(toSimplifiedMessages(coValues, messages));
}
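A minimal usage sketch for the helpers above (illustrative only, not part of the diff; `node`, `group` and `map` are assumed to come from a test setup such as the SQLite storage tests below, and `trackMessages` from the test utils removed later in this diff):

import { toSimplifiedMessages } from "./messagesTestUtils.js";
import { trackMessages } from "./testUtils.js";

// Assumed: `node` is a LocalNode with `group` and `map` already created on it.
const tracking = trackMessages(node);
map.set("hello", "world");
// Produces lines such as: "client -> CONTENT Map header: true new: After: 0 New: 1"
console.log(
toSimplifiedMessages({ Map: map.core, Group: group.core }, tracking.messages),
);
tracking.restore();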
@@ -1,798 +0,0 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { unlinkSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { LocalNode, cojsonInternals } from "cojson";
|
||||
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
|
||||
import { expect, onTestFinished, test, vi } from "vitest";
|
||||
import { toSimplifiedMessages } from "./messagesTestUtils.js";
|
||||
import { trackMessages, waitFor } from "./testUtils.js";
|
||||
|
||||
const Crypto = await WasmCrypto.create();
|
||||
|
||||
import Database, { type Database as DatabaseT } from "libsql";
|
||||
import { StorageManagerAsync } from "../managerAsync.js";
|
||||
import { SQLiteNodeBaseAsync } from "../sqliteAsync/node.js";
|
||||
import type { SQLiteDatabaseDriverAsync } from "../sqliteAsync/types.js";
|
||||
|
||||
class LibSQLSqliteDriver implements SQLiteDatabaseDriverAsync {
|
||||
private readonly db: DatabaseT;
|
||||
|
||||
constructor(filename: string) {
|
||||
this.db = new Database(filename, {});
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
await this.db.pragma("journal_mode = WAL");
|
||||
}
|
||||
|
||||
async run(sql: string, params: unknown[]) {
|
||||
this.db.prepare(sql).run(params);
|
||||
}
|
||||
|
||||
async query<T>(sql: string, params: unknown[]): Promise<T[]> {
|
||||
return this.db.prepare(sql).all(params) as T[];
|
||||
}
|
||||
|
||||
async get<T>(sql: string, params: unknown[]): Promise<T | undefined> {
|
||||
return this.db.prepare(sql).get(params) as T | undefined;
|
||||
}
|
||||
|
||||
async transaction(callback: () => unknown) {
|
||||
await this.run("BEGIN TRANSACTION", []);
|
||||
|
||||
try {
|
||||
await callback();
|
||||
await this.run("COMMIT", []);
|
||||
} catch (error) {
|
||||
await this.run("ROLLBACK", []);
|
||||
}
|
||||
}
|
||||
|
||||
async closeDb() {
|
||||
this.db.close();
|
||||
}
|
||||
}
|
||||
|
||||
async function createSQLiteStorage(defaultDbPath?: string) {
|
||||
const dbPath = defaultDbPath ?? join(tmpdir(), `test-${randomUUID()}.db`);
|
||||
|
||||
if (!defaultDbPath) {
|
||||
onTestFinished(() => {
|
||||
unlinkSync(dbPath);
|
||||
});
|
||||
}
|
||||
|
||||
const db = new LibSQLSqliteDriver(dbPath);
|
||||
|
||||
return {
|
||||
peer: await SQLiteNodeBaseAsync.create({
|
||||
db,
|
||||
}),
|
||||
dbPath,
|
||||
db,
|
||||
};
|
||||
}
|
||||
|
||||
test("Should be able to initialize and load from empty DB", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node.syncManager.addPeer((await createSQLiteStorage()).peer);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(node.syncManager.peers.storage).toBeDefined();
|
||||
});
|
||||
|
||||
test("should sync and load data from storage", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node1Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
node1Sync.restore();
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
expect(map2.get("hello")).toBe("world");
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node2Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
node2Sync.restore();
|
||||
});
|
||||
|
||||
test("should send an empty content message if there is no content", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node1Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: ",
|
||||
"storage -> KNOWN Map sessions: header/0",
|
||||
]
|
||||
`);
|
||||
|
||||
node1Sync.restore();
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node2Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: ",
|
||||
"client -> KNOWN Map sessions: header/0",
|
||||
]
|
||||
`);
|
||||
|
||||
node2Sync.restore();
|
||||
});
|
||||
|
||||
test("should load dependencies correctly (group inheritance)", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
const parentGroup = node1.createGroup();
|
||||
|
||||
group.extend(parentGroup);
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
ParentGroup: parentGroup.core,
|
||||
},
|
||||
node1Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"storage -> KNOWN ParentGroup sessions: header/4",
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"storage -> KNOWN Group sessions: header/5",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
|
||||
node1Sync.restore();
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
|
||||
await node2.load(map.id);
|
||||
|
||||
expect(node2.expectCoValueLoaded(map.id)).toBeTruthy();
|
||||
expect(node2.expectCoValueLoaded(group.id)).toBeTruthy();
|
||||
expect(node2.expectCoValueLoaded(parentGroup.id)).toBeTruthy();
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
ParentGroup: parentGroup.core,
|
||||
},
|
||||
node2Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN ParentGroup sessions: header/4",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"client -> KNOWN Group sessions: header/5",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
test("should not send the same dependency value twice", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
const parentGroup = node1.createGroup();
|
||||
|
||||
group.extend(parentGroup);
|
||||
|
||||
const mapFromParent = parentGroup.createMap();
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("hello", "world");
|
||||
mapFromParent.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
node1Sync.restore();
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
|
||||
await node2.load(map.id);
|
||||
await node2.load(mapFromParent.id);
|
||||
|
||||
expect(node2.expectCoValueLoaded(map.id)).toBeTruthy();
|
||||
expect(node2.expectCoValueLoaded(mapFromParent.id)).toBeTruthy();
|
||||
expect(node2.expectCoValueLoaded(group.id)).toBeTruthy();
|
||||
expect(node2.expectCoValueLoaded(parentGroup.id)).toBeTruthy();
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
ParentGroup: parentGroup.core,
|
||||
MapFromParent: mapFromParent.core,
|
||||
},
|
||||
node2Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT ParentGroup header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN ParentGroup sessions: header/4",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 5",
|
||||
"client -> KNOWN Group sessions: header/5",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/1",
|
||||
"client -> LOAD MapFromParent sessions: empty",
|
||||
"storage -> CONTENT MapFromParent header: true new: After: 0 New: 1",
|
||||
"client -> KNOWN MapFromParent sessions: header/1",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
test("should recover from data loss", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node1Sync = trackMessages(node1);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("0", 0);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
const mock = vi
|
||||
.spyOn(StorageManagerAsync.prototype, "handleSyncMessage")
|
||||
.mockImplementation(() => Promise.resolve());
|
||||
|
||||
map.set("1", 1);
|
||||
map.set("2", 2);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
mock.mockReset();
|
||||
|
||||
map.set("3", 3);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node1Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"storage -> KNOWN Group sessions: header/3",
|
||||
"client -> CONTENT Map header: true new: After: 0 New: 1",
|
||||
"storage -> KNOWN Map sessions: header/1",
|
||||
"client -> CONTENT Map header: false new: After: 3 New: 1",
|
||||
"storage -> KNOWN CORRECTION Map sessions: header/1",
|
||||
"client -> CONTENT Map header: false new: After: 1 New: 3",
|
||||
"storage -> KNOWN Map sessions: header/4",
|
||||
]
|
||||
`);
|
||||
|
||||
node1Sync.restore();
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
|
||||
if (map2 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
expect(map2.toJSON()).toEqual({
|
||||
"0": 0,
|
||||
"1": 1,
|
||||
"2": 2,
|
||||
"3": 3,
|
||||
});
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node2Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 4",
|
||||
"client -> KNOWN Map sessions: header/4",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
test("should recover missing dependencies from storage", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const account = LocalNode.internalCreateAccount({
|
||||
crypto: Crypto,
|
||||
});
|
||||
const node1 = account.core.node;
|
||||
|
||||
const serverNode = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const [serverPeer, clientPeer] = cojsonInternals.connectedPeers(
|
||||
node1.agentSecret,
|
||||
serverNode.agentSecret,
|
||||
{
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
},
|
||||
);
|
||||
|
||||
node1.syncManager.addPeer(serverPeer);
|
||||
serverNode.syncManager.addPeer(clientPeer);
|
||||
|
||||
const handleSyncMessage = StorageManagerAsync.prototype.handleSyncMessage;
|
||||
|
||||
const mock = vi
|
||||
.spyOn(StorageManagerAsync.prototype, "handleSyncMessage")
|
||||
.mockImplementation(function (this: StorageManagerAsync, msg) {
|
||||
if (
|
||||
msg.action === "content" &&
|
||||
[group.core.id, account.core.id].includes(msg.id)
|
||||
) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return handleSyncMessage.call(this, msg);
|
||||
});
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
group.addMember("everyone", "writer");
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("0", 0);
|
||||
|
||||
mock.mockReset();
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
const node2 = new LocalNode(
|
||||
Crypto.newRandomAgentSecret(),
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const [serverPeer2, clientPeer2] = cojsonInternals.connectedPeers(
|
||||
node1.agentSecret,
|
||||
serverNode.agentSecret,
|
||||
{
|
||||
peer1role: "server",
|
||||
peer2role: "client",
|
||||
},
|
||||
);
|
||||
|
||||
node2.syncManager.addPeer(serverPeer2);
|
||||
serverNode.syncManager.addPeer(clientPeer2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
|
||||
if (map2 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
expect(map2.toJSON()).toEqual({
|
||||
"0": 0,
|
||||
});
|
||||
});
|
||||
|
||||
test("should sync multiple sessions in a single content message", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
|
||||
const map = group.createMap();
|
||||
|
||||
map.set("hello", "world");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
node1.gracefulShutdown();
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
node2.syncManager.addPeer((await createSQLiteStorage(dbPath)).peer);
|
||||
|
||||
const map2 = await node2.load(map.id);
|
||||
if (map2 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
expect(map2.get("hello")).toBe("world");
|
||||
|
||||
map2.set("hello", "world2");
|
||||
|
||||
await map2.core.waitForSync();
|
||||
|
||||
node2.gracefulShutdown();
|
||||
|
||||
const node3 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node3Sync = trackMessages(node3);
|
||||
|
||||
node3.syncManager.addPeer((await createSQLiteStorage(dbPath)).peer);
|
||||
|
||||
const map3 = await node3.load(map.id);
|
||||
if (map3 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
expect(map3.get("hello")).toBe("world2");
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: map.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node3Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 1 | After: 0 New: 1",
|
||||
"client -> KNOWN Map sessions: header/2",
|
||||
]
|
||||
`);
|
||||
|
||||
node3Sync.restore();
|
||||
});
|
||||
|
||||
test("large coValue upload streaming", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const { peer, dbPath } = await createSQLiteStorage();
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
const group = node1.createGroup();
|
||||
const largeMap = group.createMap();
|
||||
|
||||
const dataSize = 1 * 1024 * 200;
|
||||
const chunkSize = 1024; // 1KB chunks
|
||||
const chunks = dataSize / chunkSize;
|
||||
|
||||
const value = "a".repeat(chunkSize);
|
||||
|
||||
for (let i = 0; i < chunks; i++) {
|
||||
const key = `key${i}`;
|
||||
largeMap.set(key, value, "trusting");
|
||||
}
|
||||
|
||||
await largeMap.core.waitForSync();
|
||||
|
||||
node1.gracefulShutdown();
|
||||
|
||||
const node2 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const node2Sync = trackMessages(node2);
|
||||
|
||||
const { peer: peer2 } = await createSQLiteStorage(dbPath);
|
||||
|
||||
node2.syncManager.addPeer(peer2);
|
||||
|
||||
const largeMapOnNode2 = await node2.load(largeMap.id);
|
||||
|
||||
if (largeMapOnNode2 === "unavailable") {
|
||||
throw new Error("Map is unavailable");
|
||||
}
|
||||
|
||||
await waitFor(() => {
|
||||
expect(largeMapOnNode2.core.knownState()).toEqual(
|
||||
largeMap.core.knownState(),
|
||||
);
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
expect(
|
||||
toSimplifiedMessages(
|
||||
{
|
||||
Map: largeMap.core,
|
||||
Group: group.core,
|
||||
},
|
||||
node2Sync.messages,
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
"client -> LOAD Map sessions: empty",
|
||||
"storage -> KNOWN Map sessions: header/200",
|
||||
"storage -> CONTENT Group header: true new: After: 0 New: 3",
|
||||
"client -> KNOWN Group sessions: header/3",
|
||||
"storage -> CONTENT Map header: true new: After: 0 New: 97",
|
||||
"client -> KNOWN Map sessions: header/97",
|
||||
"storage -> CONTENT Map header: true new: After: 97 New: 97",
|
||||
"client -> KNOWN Map sessions: header/194",
|
||||
"storage -> CONTENT Map header: true new: After: 194 New: 6",
|
||||
"client -> KNOWN Map sessions: header/200",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
test("should close the db when the node is closed", async () => {
|
||||
const agentSecret = Crypto.newRandomAgentSecret();
|
||||
|
||||
const node1 = new LocalNode(
|
||||
agentSecret,
|
||||
Crypto.newRandomSessionID(Crypto.getAgentID(agentSecret)),
|
||||
Crypto,
|
||||
);
|
||||
|
||||
const { peer, db } = await createSQLiteStorage();
|
||||
|
||||
const spy = vi.spyOn(db, "closeDb");
|
||||
|
||||
node1.syncManager.addPeer(peer);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
|
||||
node1.gracefulShutdown();
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
expect(spy).toHaveBeenCalled();
|
||||
});
|
||||
@@ -1,245 +0,0 @@
|
||||
import {
|
||||
type Mocked,
|
||||
afterEach,
|
||||
beforeEach,
|
||||
describe,
|
||||
expect,
|
||||
test,
|
||||
vi,
|
||||
} from "vitest";
|
||||
|
||||
import type {
|
||||
CojsonInternalTypes,
|
||||
OutgoingSyncQueue,
|
||||
SessionID,
|
||||
SyncMessage,
|
||||
} from "cojson";
|
||||
import { StorageManagerAsync as SyncManager } from "../managerAsync.js";
|
||||
import { getDependedOnCoValues } from "../syncUtils.js";
|
||||
import type { DBClientInterfaceAsync as DBClientInterface } from "../types.js";
|
||||
import { fixtures } from "./fixtureMessages.js";
|
||||
|
||||
type RawCoID = CojsonInternalTypes.RawCoID;
|
||||
type NewContentMessage = CojsonInternalTypes.NewContentMessage;
|
||||
vi.mock("../syncUtils");
|
||||
|
||||
const coValueIdToLoad = "co_zKwG8NyfZ8GXqcjDHY4NS3SbU2m";
|
||||
const createEmptyLoadMsg = (id: string) =>
|
||||
({
|
||||
action: "load",
|
||||
id,
|
||||
header: false,
|
||||
sessions: {},
|
||||
}) as SyncMessage;
|
||||
|
||||
const sessionsData = fixtures[coValueIdToLoad].sessionRecords;
|
||||
const coValueHeader = fixtures[coValueIdToLoad].getContent({ after: 0 }).header;
|
||||
const incomingContentMessage = fixtures[coValueIdToLoad].getContent({
|
||||
after: 0,
|
||||
}) as SyncMessage;
|
||||
|
||||
describe("DB sync manager", () => {
|
||||
let syncManager: SyncManager;
|
||||
const queue: OutgoingSyncQueue = {} as unknown as OutgoingSyncQueue;
|
||||
|
||||
const DBClient = vi.fn();
|
||||
DBClient.prototype.getCoValue = vi.fn();
|
||||
DBClient.prototype.getCoValueSessions = vi.fn();
|
||||
DBClient.prototype.getSingleCoValueSession = vi.fn();
|
||||
DBClient.prototype.getNewTransactionInSession = vi.fn();
|
||||
DBClient.prototype.addSessionUpdate = vi.fn();
|
||||
DBClient.prototype.addTransaction = vi.fn();
|
||||
DBClient.prototype.transaction = vi.fn((callback) => callback());
|
||||
|
||||
beforeEach(async () => {
|
||||
const idbClient = new DBClient() as unknown as Mocked<DBClientInterface>;
|
||||
syncManager = new SyncManager(idbClient, queue);
|
||||
syncManager.sendStateMessage = vi.fn();
|
||||
|
||||
// No dependencies found
|
||||
vi.mocked(getDependedOnCoValues).mockReturnValue(new Set());
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
test("Incoming known messages are not processed", async () => {
|
||||
await syncManager.handleSyncMessage({ action: "known" } as SyncMessage);
|
||||
expect(syncManager.sendStateMessage).not.toBeCalled();
|
||||
});
|
||||
|
||||
describe("Handle load incoming message", () => {
|
||||
test("sends empty known message for unknown coValue", async () => {
|
||||
const loadMsg = createEmptyLoadMsg(coValueIdToLoad);
|
||||
|
||||
DBClient.prototype.getCoValue.mockResolvedValueOnce(undefined);
|
||||
|
||||
await syncManager.handleSyncMessage(loadMsg);
|
||||
|
||||
expect(syncManager.sendStateMessage).toBeCalledWith({
|
||||
action: "known",
|
||||
header: false,
|
||||
id: coValueIdToLoad,
|
||||
sessions: {},
|
||||
});
|
||||
});
|
||||
|
||||
test("Sends known and content message for known coValue with no sessions", async () => {
|
||||
const loadMsg = createEmptyLoadMsg(coValueIdToLoad);
|
||||
|
||||
DBClient.prototype.getCoValue.mockResolvedValueOnce({
|
||||
id: coValueIdToLoad,
|
||||
header: coValueHeader,
|
||||
rowID: 3,
|
||||
});
|
||||
DBClient.prototype.getCoValueSessions.mockResolvedValueOnce([]);
|
||||
|
||||
await syncManager.handleSyncMessage(loadMsg);
|
||||
|
||||
expect(syncManager.sendStateMessage).toBeCalledTimes(1);
|
||||
expect(syncManager.sendStateMessage).toBeCalledWith({
|
||||
action: "content",
|
||||
header: expect.objectContaining({
|
||||
type: expect.any(String),
|
||||
ruleset: expect.any(Object),
|
||||
}),
|
||||
id: coValueIdToLoad,
|
||||
new: {},
|
||||
priority: 0,
|
||||
});
|
||||
});
|
||||
|
||||
test("Sends messages for unique coValue dependencies only, leaving out circular dependencies", async () => {
|
||||
const loadMsg = createEmptyLoadMsg(coValueIdToLoad);
|
||||
const dependency1 = "co_zMKhQJs5rAeGjta3JX2qEdBS6hS";
|
||||
const dependency2 = "co_zP51HdyAVCuRY9ptq5iu8DhMyAb";
|
||||
const dependency3 = "co_zGyBniuJmKkcirCKYrccWpjQEFY";
|
||||
const dependenciesTreeWithLoop: Record<RawCoID, RawCoID[]> = {
|
||||
[coValueIdToLoad]: [dependency1, dependency2],
|
||||
[dependency1]: [],
|
||||
[dependency2]: [coValueIdToLoad, dependency3],
|
||||
[dependency3]: [dependency1],
|
||||
};
|
||||
|
||||
DBClient.prototype.getCoValue.mockImplementation(
|
||||
(coValueId: RawCoID) => ({
|
||||
id: coValueId,
|
||||
header: coValueHeader,
|
||||
rowID: 3,
|
||||
}),
|
||||
);
|
||||
|
||||
DBClient.prototype.getCoValueSessions.mockResolvedValue([]);
|
||||
|
||||
// Fetch dependencies of the current dependency for the future recursion iterations
|
||||
vi.mocked(getDependedOnCoValues).mockImplementation(
|
||||
(_, msg) => new Set(dependenciesTreeWithLoop[msg.id] || []),
|
||||
);
|
||||
|
||||
await syncManager.handleSyncMessage(loadMsg);
|
||||
|
||||
// We send out (known + content) message pairs only FOUR times, as many as there are coValues
// and fewer than the number of interconnected dependencies to loop through in dependenciesTreeWithLoop
|
||||
expect(syncManager.sendStateMessage).toBeCalledTimes(4);
|
||||
|
||||
const contentExpected = {
|
||||
action: "content",
|
||||
header: expect.any(Object),
|
||||
new: {},
|
||||
priority: 0,
|
||||
};
|
||||
|
||||
expect(syncManager.sendStateMessage).toHaveBeenNthCalledWith(1, {
|
||||
...contentExpected,
|
||||
id: dependency1,
|
||||
});
|
||||
expect(syncManager.sendStateMessage).toHaveBeenNthCalledWith(2, {
|
||||
...contentExpected,
|
||||
id: dependency3,
|
||||
});
|
||||
expect(syncManager.sendStateMessage).toHaveBeenNthCalledWith(3, {
|
||||
...contentExpected,
|
||||
id: dependency2,
|
||||
});
|
||||
expect(syncManager.sendStateMessage).toHaveBeenNthCalledWith(4, {
|
||||
...contentExpected,
|
||||
id: coValueIdToLoad,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Handle content incoming message", () => {
|
||||
test("Sends correction message for unknown coValue", async () => {
|
||||
DBClient.prototype.getCoValue.mockResolvedValueOnce(undefined);
|
||||
|
||||
await syncManager.handleSyncMessage({
|
||||
...incomingContentMessage,
|
||||
header: undefined,
|
||||
} as SyncMessage);
|
||||
|
||||
expect(syncManager.sendStateMessage).toBeCalledWith({
|
||||
action: "known",
|
||||
header: false,
|
||||
id: coValueIdToLoad,
|
||||
isCorrection: true,
|
||||
sessions: {},
|
||||
});
|
||||
});
|
||||
|
||||
test("Saves new transaction and sends an ack message as response", async () => {
|
||||
DBClient.prototype.getCoValue.mockResolvedValueOnce({
|
||||
id: coValueIdToLoad,
|
||||
header: coValueHeader,
|
||||
rowID: 3,
|
||||
});
|
||||
DBClient.prototype.getCoValueSessions.mockResolvedValueOnce([]);
|
||||
const msg = {
|
||||
...incomingContentMessage,
|
||||
header: undefined,
|
||||
} as NewContentMessage;
|
||||
|
||||
await syncManager.handleSyncMessage(msg);
|
||||
|
||||
const incomingTxCount = Object.keys(msg.new).reduce(
|
||||
(acc, sessionID) =>
|
||||
acc + msg.new[sessionID as SessionID]!.newTransactions.length,
|
||||
0,
|
||||
);
|
||||
expect(DBClient.prototype.addTransaction).toBeCalledTimes(
|
||||
incomingTxCount,
|
||||
);
|
||||
|
||||
expect(syncManager.sendStateMessage).toBeCalledWith({
|
||||
action: "known",
|
||||
header: true,
|
||||
id: coValueIdToLoad,
|
||||
sessions: expect.any(Object),
|
||||
});
|
||||
});
|
||||
|
||||
test("Sends correction message when peer sends a message far ahead of our state due to invalid assumption", async () => {
|
||||
DBClient.prototype.getCoValue.mockResolvedValueOnce({
|
||||
id: coValueIdToLoad,
|
||||
header: coValueHeader,
|
||||
rowID: 3,
|
||||
});
|
||||
DBClient.prototype.getCoValueSessions.mockResolvedValueOnce(sessionsData);
|
||||
|
||||
const farAheadContentMessage = fixtures[coValueIdToLoad].getContent({
|
||||
after: 10000,
|
||||
});
|
||||
await syncManager.handleSyncMessage(
|
||||
farAheadContentMessage as SyncMessage,
|
||||
);
|
||||
|
||||
expect(syncManager.sendStateMessage).toBeCalledWith({
|
||||
action: "known",
|
||||
header: true,
|
||||
id: coValueIdToLoad,
|
||||
isCorrection: true,
|
||||
sessions: expect.any(Object),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,73 +0,0 @@
import type { LocalNode, SyncMessage } from "cojson";
import { onTestFinished } from "vitest";
import { StorageManagerAsync } from "../managerAsync";

export function trackMessages(node: LocalNode) {
const messages: {
from: "client" | "server" | "storage";
msg: SyncMessage;
}[] = [];

const originalHandleSyncMessage =
StorageManagerAsync.prototype.handleSyncMessage;
const originalNodeSyncMessage = node.syncManager.handleSyncMessage;

StorageManagerAsync.prototype.handleSyncMessage = async function (msg) {
messages.push({
from: "client",
msg,
});
return originalHandleSyncMessage.call(this, msg);
};

node.syncManager.handleSyncMessage = async function (msg, peer) {
messages.push({
from: "storage",
msg,
});
return originalNodeSyncMessage.call(this, msg, peer);
};

const restore = () => {
StorageManagerAsync.prototype.handleSyncMessage = originalHandleSyncMessage;
node.syncManager.handleSyncMessage = originalNodeSyncMessage;
};

onTestFinished(() => {
restore();
});

return {
messages,
restore,
};
}
export function waitFor(
callback: () => boolean | undefined | Promise<boolean | undefined>,
) {
return new Promise<void>((resolve, reject) => {
const checkPassed = async () => {
try {
return { ok: await callback(), error: null };
} catch (error) {
return { ok: false, error };
}
};

let retries = 0;

const interval = setInterval(async () => {
const { ok, error } = await checkPassed();

if (ok !== false) {
clearInterval(interval);
resolve();
}

if (++retries > 10) {
clearInterval(interval);
reject(error);
}
}, 100);
});
}
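A minimal usage sketch for `waitFor` (illustrative only, not part of the diff; `map2` is an assumed placeholder for a loaded CoMap like the ones in the tests above):

import { expect } from "vitest";
import { waitFor } from "./testUtils.js";

// Polls the callback every 100ms; resolves once it returns a non-false value
// without throwing, rejects with the last error after ~10 failed attempts.
await waitFor(() => {
expect(map2.get("hello")).toBe("world");
return true;
});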
@@ -1,17 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext", "DOM"],
"module": "esnext",
"target": "ES2020",
"moduleResolution": "bundler",
"moduleDetection": "force",
"strict": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"noUncheckedIndexedAccess": true,
"esModuleInterop": true,
"declaration": true,
"declarationMap": true
},
"include": ["./src/**/*"]
}
@@ -1,5 +1,31 @@
# cojson-transport-nodejs-ws

## 0.15.10

### Patch Changes

- cojson@0.15.10

## 0.15.9

### Patch Changes

- Updated dependencies [27b4837]
- Updated dependencies [2776263]
- cojson@0.15.9

## 0.15.8

### Patch Changes

- cojson@0.15.8

## 0.15.7

### Patch Changes

- cojson@0.15.7

## 0.15.6

### Patch Changes

@@ -1,7 +1,7 @@
{
"name": "cojson-transport-ws",
"type": "module",
"version": "0.15.6",
"version": "0.15.10",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",

@@ -1,21 +1,106 @@
|
||||
import type { SyncMessage } from "cojson";
|
||||
import type { DisconnectedError, SyncMessage } from "cojson";
|
||||
import type { Peer } from "cojson";
|
||||
import {
|
||||
type CojsonInternalTypes,
|
||||
PriorityBasedMessageQueue,
|
||||
cojsonInternals,
|
||||
logger,
|
||||
} from "cojson";
|
||||
import { addMessageToBacklog } from "./serialization.js";
|
||||
import type { AnyWebSocket } from "./types.js";
|
||||
import {
|
||||
hasWebSocketTooMuchBufferedData,
|
||||
isWebSocketOpen,
|
||||
waitForWebSocketBufferedAmount,
|
||||
waitForWebSocketOpen,
|
||||
} from "./utils.js";
|
||||
|
||||
const { CO_VALUE_PRIORITY } = cojsonInternals;
|
||||
|
||||
export const MAX_OUTGOING_MESSAGES_CHUNK_BYTES = 25_000;
|
||||
|
||||
export class BatchedOutgoingMessages {
|
||||
export class BatchedOutgoingMessages
|
||||
implements CojsonInternalTypes.OutgoingPeerChannel
|
||||
{
|
||||
private backlog = "";
|
||||
private timeout: ReturnType<typeof setTimeout> | null = null;
|
||||
private queue: PriorityBasedMessageQueue;
|
||||
private processing = false;
|
||||
private closed = false;
|
||||
|
||||
constructor(private send: (messages: string) => void) {}
|
||||
constructor(
|
||||
private websocket: AnyWebSocket,
|
||||
private batching: boolean,
|
||||
peerRole: Peer["role"],
|
||||
) {
|
||||
this.queue = new PriorityBasedMessageQueue(
|
||||
CO_VALUE_PRIORITY.HIGH,
|
||||
"outgoing",
|
||||
{
|
||||
peerRole: peerRole,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
push(msg: SyncMessage) {
|
||||
const payload = addMessageToBacklog(this.backlog, msg);
|
||||
|
||||
if (this.timeout) {
|
||||
clearTimeout(this.timeout);
|
||||
push(msg: SyncMessage | DisconnectedError) {
|
||||
if (msg === "Disconnected") {
|
||||
this.close();
|
||||
return;
|
||||
}
|
||||
|
||||
this.queue.push(msg);
|
||||
|
||||
if (this.processing) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.processQueue().catch((e) => {
|
||||
logger.error("Error while processing sendMessage queue", { err: e });
|
||||
});
|
||||
}
|
||||
|
||||
private async processQueue() {
|
||||
const { websocket } = this;
|
||||
|
||||
this.processing = true;
|
||||
|
||||
// Delay the initiation of the queue processing to accumulate messages
|
||||
// before sending them, in order to do prioritization and batching
|
||||
await new Promise<void>((resolve) => setTimeout(resolve, 5));
|
||||
|
||||
let msg = this.queue.pull();
|
||||
|
||||
while (msg) {
|
||||
if (this.closed) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isWebSocketOpen(websocket)) {
|
||||
await waitForWebSocketOpen(websocket);
|
||||
}
|
||||
|
||||
if (hasWebSocketTooMuchBufferedData(websocket)) {
|
||||
await waitForWebSocketBufferedAmount(websocket);
|
||||
}
|
||||
|
||||
if (isWebSocketOpen(websocket)) {
|
||||
this.processMessage(msg);
|
||||
|
||||
msg = this.queue.pull();
|
||||
}
|
||||
}
|
||||
|
||||
this.sendMessagesInBulk();
|
||||
this.processing = false;
|
||||
}
|
||||
|
||||
processMessage(msg: SyncMessage) {
|
||||
if (!this.batching) {
|
||||
this.websocket.send(JSON.stringify(msg));
|
||||
return;
|
||||
}
|
||||
|
||||
const payload = addMessageToBacklog(this.backlog, msg);
|
||||
|
||||
const maxChunkSizeReached =
|
||||
payload.length >= MAX_OUTGOING_MESSAGES_CHUNK_BYTES;
|
||||
const backlogExists = this.backlog.length > 0;
|
||||
@@ -23,26 +108,49 @@ export class BatchedOutgoingMessages {
|
||||
if (maxChunkSizeReached && backlogExists) {
|
||||
this.sendMessagesInBulk();
|
||||
this.backlog = addMessageToBacklog("", msg);
|
||||
this.timeout = setTimeout(() => {
|
||||
this.sendMessagesInBulk();
|
||||
}, 0);
|
||||
} else if (maxChunkSizeReached) {
|
||||
this.backlog = payload;
|
||||
this.sendMessagesInBulk();
|
||||
} else {
|
||||
this.backlog = payload;
|
||||
this.timeout = setTimeout(() => {
|
||||
this.sendMessagesInBulk();
|
||||
}, 0);
|
||||
}
|
||||
}
|
||||
|
||||
sendMessagesInBulk() {
|
||||
this.send(this.backlog);
|
||||
this.backlog = "";
|
||||
if (this.backlog.length > 0 && isWebSocketOpen(this.websocket)) {
|
||||
this.websocket.send(this.backlog);
|
||||
this.backlog = "";
|
||||
}
|
||||
}
|
||||
|
||||
setBatching(enabled: boolean) {
|
||||
this.batching = enabled;
|
||||
}
|
||||
|
||||
private closeListeners = new Set<() => void>();
|
||||
onClose(callback: () => void) {
|
||||
this.closeListeners.add(callback);
|
||||
}
|
||||
|
||||
close() {
|
||||
if (this.closed) {
|
||||
return;
|
||||
}
|
||||
|
||||
let msg = this.queue.pull();
|
||||
|
||||
while (msg) {
|
||||
this.processMessage(msg);
|
||||
msg = this.queue.pull();
|
||||
}
|
||||
|
||||
this.closed = true;
|
||||
this.sendMessagesInBulk();
|
||||
|
||||
for (const listener of this.closeListeners) {
|
||||
listener();
|
||||
}
|
||||
|
||||
this.closeListeners.clear();
|
||||
}
|
||||
}
|
||||
|
||||
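The chunking policy implemented by processMessage/sendMessagesInBulk above can be summarized with a small standalone sketch (simplified: it ignores message priorities and websocket readiness, which the real class handles via PriorityBasedMessageQueue and the readyState checks; names here are hypothetical and not part of the diff):

// Illustrative batching rule: accumulate serialized messages into a backlog and
// flush a chunk when adding the next message would exceed the size limit.
const MAX_CHUNK_BYTES = 25_000;

function batchPayloads(messages: string[]): string[] {
const chunks: string[] = [];
let backlog = "";
for (const msg of messages) {
const payload = backlog.length > 0 ? `${backlog}\n${msg}` : msg;
if (payload.length >= MAX_CHUNK_BYTES && backlog.length > 0) {
// Flush the accumulated backlog first, then start a new one with this message
chunks.push(backlog);
backlog = msg;
} else if (payload.length >= MAX_CHUNK_BYTES) {
// A single oversized message is flushed on its own
chunks.push(payload);
backlog = "";
} else {
// Keep accumulating until the size limit (or, in the real class, the timeout) flushes the batch
backlog = payload;
}
}
if (backlog.length > 0) chunks.push(backlog);
return chunks;
}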
@@ -1,17 +1,9 @@
|
||||
import {
|
||||
type DisconnectedError,
|
||||
type Peer,
|
||||
type PingTimeoutError,
|
||||
type SyncMessage,
|
||||
cojsonInternals,
|
||||
logger,
|
||||
} from "cojson";
|
||||
import { type Peer, type SyncMessage, cojsonInternals, logger } from "cojson";
|
||||
import { BatchedOutgoingMessages } from "./BatchedOutgoingMessages.js";
|
||||
import { deserializeMessages } from "./serialization.js";
|
||||
import type { AnyWebSocket } from "./types.js";
|
||||
|
||||
export const BUFFER_LIMIT = 100_000;
|
||||
export const BUFFER_LIMIT_POLLING_INTERVAL = 10;
|
||||
const { ConnectedPeerChannel } = cojsonInternals;
|
||||
|
||||
export type CreateWebSocketPeerOpts = {
|
||||
id: string;
|
||||
@@ -52,70 +44,6 @@ function createPingTimeoutListener(
|
||||
};
|
||||
}
|
||||
|
||||
function waitForWebSocketOpen(websocket: AnyWebSocket) {
|
||||
return new Promise<void>((resolve) => {
|
||||
if (websocket.readyState === 1) {
|
||||
resolve();
|
||||
} else {
|
||||
websocket.addEventListener("open", () => resolve(), { once: true });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function createOutgoingMessagesManager(
|
||||
websocket: AnyWebSocket,
|
||||
batchingByDefault: boolean,
|
||||
) {
|
||||
let closed = false;
|
||||
const outgoingMessages = new BatchedOutgoingMessages((messages) => {
|
||||
if (websocket.readyState === 1) {
|
||||
websocket.send(messages);
|
||||
}
|
||||
});
|
||||
|
||||
let batchingEnabled = batchingByDefault;
|
||||
|
||||
async function sendMessage(msg: SyncMessage) {
|
||||
if (closed) {
|
||||
return Promise.reject(new Error("WebSocket closed"));
|
||||
}
|
||||
|
||||
if (websocket.readyState !== 1) {
|
||||
await waitForWebSocketOpen(websocket);
|
||||
}
|
||||
|
||||
while (
|
||||
websocket.bufferedAmount > BUFFER_LIMIT &&
|
||||
websocket.readyState === 1
|
||||
) {
|
||||
await new Promise<void>((resolve) =>
|
||||
setTimeout(resolve, BUFFER_LIMIT_POLLING_INTERVAL),
|
||||
);
|
||||
}
|
||||
|
||||
if (websocket.readyState !== 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!batchingEnabled) {
|
||||
websocket.send(JSON.stringify(msg));
|
||||
} else {
|
||||
outgoingMessages.push(msg);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
sendMessage,
|
||||
setBatchingEnabled(enabled: boolean) {
|
||||
batchingEnabled = enabled;
|
||||
},
|
||||
close() {
|
||||
closed = true;
|
||||
outgoingMessages.close();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createClosedEventEmitter(callback = () => {}) {
|
||||
let disconnected = false;
|
||||
|
||||
@@ -137,17 +65,11 @@ export function createWebSocketPeer({
|
||||
onSuccess,
|
||||
onClose,
|
||||
}: CreateWebSocketPeerOpts): Peer {
|
||||
const incoming = new cojsonInternals.Channel<
|
||||
SyncMessage | DisconnectedError | PingTimeoutError
|
||||
>();
|
||||
const incoming = new ConnectedPeerChannel();
|
||||
const emitClosedEvent = createClosedEventEmitter(onClose);
|
||||
|
||||
function handleClose() {
|
||||
incoming
|
||||
.push("Disconnected")
|
||||
.catch((e) =>
|
||||
logger.error("Error while pushing disconnect msg", { err: e }),
|
||||
);
|
||||
incoming.push("Disconnected");
|
||||
emitClosedEvent();
|
||||
}
|
||||
|
||||
@@ -166,18 +88,19 @@ export function createWebSocketPeer({
|
||||
expectPings,
|
||||
pingTimeout,
|
||||
() => {
|
||||
incoming
|
||||
.push("PingTimeout")
|
||||
.catch((e) =>
|
||||
logger.error("Error while pushing ping timeout", { err: e }),
|
||||
);
|
||||
incoming.push("Disconnected");
|
||||
logger.error("Ping timeout from peer", {
|
||||
peerId: id,
|
||||
peerRole: role,
|
||||
});
|
||||
emitClosedEvent();
|
||||
},
|
||||
);
|
||||
|
||||
const outgoingMessages = createOutgoingMessagesManager(
|
||||
const outgoing = new BatchedOutgoingMessages(
|
||||
websocket,
|
||||
batchingByDefault,
|
||||
role,
|
||||
);
|
||||
let isFirstMessage = true;
|
||||
|
||||
@@ -206,50 +129,42 @@ export function createWebSocketPeer({
|
||||
|
||||
if (messages.length > 1) {
|
||||
// If more than one message is received, the other peer supports batching
|
||||
outgoingMessages.setBatchingEnabled(true);
|
||||
outgoing.setBatching(true);
|
||||
}
|
||||
|
||||
for (const msg of messages) {
|
||||
if (msg && "action" in msg) {
|
||||
incoming
|
||||
.push(msg)
|
||||
.catch((e) =>
|
||||
logger.error("Error while pushing incoming msg", { err: e }),
|
||||
);
|
||||
incoming.push(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
websocket.addEventListener("message", handleIncomingMsg);
|
||||
|
||||
outgoing.onClose(() => {
|
||||
websocket.removeEventListener("message", handleIncomingMsg);
|
||||
websocket.removeEventListener("close", handleClose);
|
||||
pingTimeoutListener.clear();
|
||||
emitClosedEvent();
|
||||
|
||||
if (websocket.readyState === 0) {
|
||||
websocket.addEventListener(
|
||||
"open",
|
||||
function handleClose() {
|
||||
websocket.close();
|
||||
},
|
||||
{ once: true },
|
||||
);
|
||||
} else if (websocket.readyState === 1) {
|
||||
websocket.close();
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
id,
|
||||
incoming,
|
||||
outgoing: {
|
||||
push: outgoingMessages.sendMessage,
|
||||
close() {
|
||||
outgoingMessages.close();
|
||||
|
||||
websocket.removeEventListener("message", handleIncomingMsg);
|
||||
websocket.removeEventListener("close", handleClose);
|
||||
pingTimeoutListener.clear();
|
||||
emitClosedEvent();
|
||||
|
||||
if (websocket.readyState === 0) {
|
||||
websocket.addEventListener(
|
||||
"open",
|
||||
function handleClose() {
|
||||
websocket.close();
|
||||
},
|
||||
{ once: true },
|
||||
);
|
||||
} else if (websocket.readyState === 1) {
|
||||
websocket.close();
|
||||
}
|
||||
},
|
||||
},
|
||||
outgoing,
|
||||
role,
|
||||
crashOnClose: false,
|
||||
deletePeerStateOnClose,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,146 +0,0 @@
|
||||
import type { SyncMessage } from "cojson";
|
||||
import type { CojsonInternalTypes } from "cojson";
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
|
||||
import {
|
||||
BatchedOutgoingMessages,
|
||||
MAX_OUTGOING_MESSAGES_CHUNK_BYTES,
|
||||
} from "../BatchedOutgoingMessages.js";
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
describe("BatchedOutgoingMessages", () => {
|
||||
function setup() {
|
||||
const sendMock = vi.fn();
|
||||
const batchedMessages = new BatchedOutgoingMessages(sendMock);
|
||||
return { sendMock, batchedMessages };
|
||||
}
|
||||
|
||||
test("should batch messages and send them after a timeout", () => {
|
||||
const { sendMock, batchedMessages } = setup();
|
||||
const message1: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z1",
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
const message2: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z2",
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
|
||||
batchedMessages.push(message1);
|
||||
batchedMessages.push(message2);
|
||||
|
||||
expect(sendMock).not.toHaveBeenCalled();
|
||||
|
||||
vi.runAllTimers();
|
||||
|
||||
expect(sendMock).toHaveBeenCalledTimes(1);
|
||||
expect(sendMock).toHaveBeenCalledWith(
|
||||
`${JSON.stringify(message1)}\n${JSON.stringify(message2)}`,
|
||||
);
|
||||
});
|
||||
|
||||
test("should send messages immediately when reaching MAX_OUTGOING_MESSAGES_CHUNK_BYTES", () => {
|
||||
const { sendMock, batchedMessages } = setup();
|
||||
const largeMessage: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z_large",
|
||||
header: false,
|
||||
sessions: {
|
||||
// Add a large payload to exceed MAX_OUTGOING_MESSAGES_CHUNK_BYTES
|
||||
payload: "x".repeat(MAX_OUTGOING_MESSAGES_CHUNK_BYTES),
|
||||
} as CojsonInternalTypes.CoValueKnownState["sessions"],
|
||||
};
|
||||
|
||||
batchedMessages.push(largeMessage);
|
||||
|
||||
expect(sendMock).toHaveBeenCalledTimes(1);
|
||||
expect(sendMock).toHaveBeenCalledWith(JSON.stringify(largeMessage));
|
||||
});
|
||||
|
||||
test("should send accumulated messages before a large message", () => {
|
||||
const { sendMock, batchedMessages } = setup();
|
||||
const smallMessage: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z_small",
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
const largeMessage: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z_large",
|
||||
header: false,
|
||||
sessions: {
|
||||
// Add a large payload to exceed MAX_OUTGOING_MESSAGES_CHUNK_BYTES
|
||||
payload: "x".repeat(MAX_OUTGOING_MESSAGES_CHUNK_BYTES),
|
||||
} as CojsonInternalTypes.CoValueKnownState["sessions"],
|
||||
};
|
||||
|
||||
batchedMessages.push(smallMessage);
|
||||
batchedMessages.push(largeMessage);
|
||||
|
||||
vi.runAllTimers();
|
||||
|
||||
expect(sendMock).toHaveBeenCalledTimes(2);
|
||||
expect(sendMock).toHaveBeenNthCalledWith(1, JSON.stringify(smallMessage));
|
||||
expect(sendMock).toHaveBeenNthCalledWith(2, JSON.stringify(largeMessage));
|
||||
});
|
||||
|
||||
test("should send remaining messages on close", () => {
|
||||
const { sendMock, batchedMessages } = setup();
|
||||
const message: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z_test",
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
|
||||
batchedMessages.push(message);
|
||||
expect(sendMock).not.toHaveBeenCalled();
|
||||
|
||||
batchedMessages.close();
|
||||
|
||||
expect(sendMock).toHaveBeenCalledTimes(1);
|
||||
expect(sendMock).toHaveBeenCalledWith(JSON.stringify(message));
|
||||
});
|
||||
|
||||
test("should clear timeout when pushing new messages", () => {
|
||||
const { sendMock, batchedMessages } = setup();
|
||||
const message1: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z1",
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
const message2: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_z2",
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
|
||||
batchedMessages.push(message1);
|
||||
|
||||
const clearTimeoutSpy = vi.spyOn(global, "clearTimeout");
|
||||
|
||||
batchedMessages.push(message2);
|
||||
|
||||
expect(clearTimeoutSpy).toHaveBeenCalled();
|
||||
|
||||
vi.runAllTimers();
|
||||
|
||||
expect(sendMock).toHaveBeenCalledTimes(1);
|
||||
expect(sendMock).toHaveBeenCalledWith(
|
||||
`${JSON.stringify(message1)}\n${JSON.stringify(message2)}`,
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1,14 +1,15 @@
|
||||
import type { SyncMessage } from "cojson";
|
||||
import type { Channel } from "queueueue";
|
||||
import type { CojsonInternalTypes, SyncMessage } from "cojson";
|
||||
import { cojsonInternals } from "cojson";
|
||||
import { type Mocked, describe, expect, test, vi } from "vitest";
|
||||
import { MAX_OUTGOING_MESSAGES_CHUNK_BYTES } from "../BatchedOutgoingMessages.js";
|
||||
import {
|
||||
BUFFER_LIMIT,
|
||||
BUFFER_LIMIT_POLLING_INTERVAL,
|
||||
type CreateWebSocketPeerOpts,
|
||||
createWebSocketPeer,
|
||||
} from "../createWebSocketPeer.js";
|
||||
import type { AnyWebSocket } from "../types.js";
|
||||
import { BUFFER_LIMIT, BUFFER_LIMIT_POLLING_INTERVAL } from "../utils.js";
|
||||
|
||||
const { CO_VALUE_PRIORITY } = cojsonInternals;
|
||||
|
||||
function setup(opts: Partial<CreateWebSocketPeerOpts> = {}) {
|
||||
const listeners = new Map<string, (event: MessageEvent) => void>();
|
||||
@@ -48,34 +49,28 @@ describe("createWebSocketPeer", () => {
|
||||
expect(peer).toHaveProperty("incoming");
|
||||
expect(peer).toHaveProperty("outgoing");
|
||||
expect(peer).toHaveProperty("role", "client");
|
||||
expect(peer).toHaveProperty("crashOnClose", false);
|
||||
});
|
||||
|
||||
test("should handle disconnection", async () => {
|
||||
expect.assertions(1);
|
||||
|
||||
const { listeners, peer } = setup();
|
||||
|
||||
const incoming = peer.incoming as Channel<
|
||||
SyncMessage | "Disconnected" | "PingTimeout"
|
||||
>;
|
||||
const pushSpy = vi.spyOn(incoming, "push");
|
||||
const onMessageSpy = vi.fn();
|
||||
peer.incoming.onMessage(onMessageSpy);
|
||||
|
||||
const closeHandler = listeners.get("close");
|
||||
|
||||
closeHandler?.(new MessageEvent("close"));
|
||||
|
||||
expect(pushSpy).toHaveBeenCalledWith("Disconnected");
|
||||
expect(onMessageSpy).toHaveBeenCalledWith("Disconnected");
|
||||
});
|
||||
|
||||
test("should handle ping timeout", async () => {
|
||||
vi.useFakeTimers();
|
||||
const { listeners, peer } = setup();
|
||||
|
||||
const incoming = peer.incoming as Channel<
|
||||
SyncMessage | "Disconnected" | "PingTimeout"
|
||||
>;
|
||||
const pushSpy = vi.spyOn(incoming, "push");
|
||||
const onMessageSpy = vi.fn();
|
||||
|
||||
peer.incoming.onMessage(onMessageSpy);
|
||||
|
||||
const messageHandler = listeners.get("message");
|
||||
|
||||
@@ -83,7 +78,7 @@ describe("createWebSocketPeer", () => {
|
||||
|
||||
await vi.advanceTimersByTimeAsync(10_000);
|
||||
|
||||
expect(pushSpy).toHaveBeenCalledWith("PingTimeout");
|
||||
expect(onMessageSpy).toHaveBeenCalledWith("Disconnected");
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
@@ -97,15 +92,14 @@ describe("createWebSocketPeer", () => {
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
const promise = peer.outgoing.push(testMessage);
|
||||
|
||||
peer.outgoing.push(testMessage);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockWebSocket.send).toHaveBeenCalledWith(
|
||||
JSON.stringify(testMessage),
|
||||
);
|
||||
});
|
||||
|
||||
await expect(promise).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
test("should stop sending messages when the websocket is closed", async () => {
|
||||
@@ -153,23 +147,6 @@ describe("createWebSocketPeer", () => {
|
||||
expect(mockWebSocket.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("should return a rejection if a message is sent after the peer is closed", async () => {
|
||||
const { peer } = setup();
|
||||
|
||||
peer.outgoing.close();
|
||||
|
||||
const message: SyncMessage = {
|
||||
action: "known",
|
||||
id: "co_ztest",
|
||||
header: false,
|
||||
sessions: {},
|
||||
};
|
||||
|
||||
await expect(peer.outgoing.push(message)).rejects.toThrow(
|
||||
"WebSocket closed",
|
||||
);
|
||||
});
|
||||
|
||||
test("should call onSuccess handler after receiving first message", () => {
|
||||
const onSuccess = vi.fn();
|
||||
const { listeners } = setup({ onSuccess });
|
||||
@@ -229,6 +206,42 @@ describe("createWebSocketPeer", () => {
|
||||
);
|
||||
});
|
||||
|
||||
test("should sort outgoing messages by priority", async () => {
|
||||
const { peer, mockWebSocket } = setup();
|
||||
|
||||
mockWebSocket.send.mockImplementation(() => {
|
||||
mockWebSocket.readyState = 0;
|
||||
});
|
||||
|
||||
const message1: SyncMessage = {
|
||||
action: "content",
|
||||
id: "co_zlow",
|
||||
new: {},
|
||||
priority: CO_VALUE_PRIORITY.LOW,
|
||||
};
|
||||
|
||||
const message2: SyncMessage = {
|
||||
action: "content",
|
||||
id: "co_zhigh",
|
||||
new: {},
|
||||
priority: CO_VALUE_PRIORITY.HIGH,
|
||||
};
|
||||
|
||||
void peer.outgoing.push(message1);
|
||||
void peer.outgoing.push(message2);
|
||||
void peer.outgoing.push(message2);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockWebSocket.send).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
expect(mockWebSocket.send).toHaveBeenCalledWith(
|
||||
[message2, message2, message1]
|
||||
.map((msg) => JSON.stringify(msg))
|
||||
.join("\n"),
|
||||
);
|
||||
});
|
||||
|
||||
test("should send all the pending messages when the websocket is closed", async () => {
|
||||
const { peer, mockWebSocket } = setup();
|
||||
|
||||
@@ -298,6 +311,43 @@ describe("createWebSocketPeer", () => {
    );
  });

  test("should send accumulated messages before a large message", async () => {
    const { peer, mockWebSocket } = setup();

    const smallMessage: SyncMessage = {
      action: "known",
      id: "co_z_small",
      header: false,
      sessions: {},
    };
    const largeMessage: SyncMessage = {
      action: "known",
      id: "co_z_large",
      header: false,
      sessions: {
        // Add a large payload to exceed MAX_OUTGOING_MESSAGES_CHUNK_BYTES
        payload: "x".repeat(MAX_OUTGOING_MESSAGES_CHUNK_BYTES),
      } as CojsonInternalTypes.CoValueKnownState["sessions"],
    };

    void peer.outgoing.push(smallMessage);
    void peer.outgoing.push(largeMessage);

    await waitFor(() => {
      expect(mockWebSocket.send).toHaveBeenCalledTimes(2);
    });

    expect(mockWebSocket.send).toHaveBeenCalledTimes(2);
    expect(mockWebSocket.send).toHaveBeenNthCalledWith(
      1,
      JSON.stringify(smallMessage),
    );
    expect(mockWebSocket.send).toHaveBeenNthCalledWith(
      2,
      JSON.stringify(largeMessage),
    );
  });

  test("should wait for the buffer to be under BUFFER_LIMIT before sending more messages", async () => {
    vi.useFakeTimers();
    const { peer, mockWebSocket } = setup();

@@ -88,5 +88,6 @@ export const startSyncServer = async (port?: number) => {
    syncServer,
    port: actualPort,
    localNode,
    wss,
  };
};

@@ -1,3 +1,4 @@
import { assert } from "node:console";
import { ControlledAgent, type CryptoProvider, LocalNode } from "cojson";
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
import { afterEach, beforeEach, describe, expect, test } from "vitest";
@@ -7,7 +8,7 @@ import { startSyncServer } from "./syncServer";
import { waitFor } from "./utils";

describe("WebSocket Peer Integration", () => {
  let server: any;
  let server: Awaited<ReturnType<typeof startSyncServer>>;
  let syncServerUrl: string;
  let crypto: CryptoProvider;

@@ -93,7 +94,11 @@ describe("WebSocket Peer Integration", () => {
    const serverNode = server.localNode;
    const serverMap = await serverNode.load(map.id);

    expect(serverMap.get("testKey")).toBe("testValue");
    if (serverMap === "unavailable") {
      throw new Error("Server map is unavailable");
    }

    expect(serverMap.get("testKey")?.toString()).toBe("testValue");
  });

  test("should handle disconnection and cleanup", async () => {
@@ -161,4 +166,34 @@ describe("WebSocket Peer Integration", () => {

    expect(ws.readyState).toBe(WebSocket.CLOSED);
  });

  test("calling terminate on the server should close the connection", async () => {
    const ws = new WebSocket(syncServerUrl);
    let disconnectCalled = false;

    createWebSocketPeer({
      id: "test-client",
      websocket: ws,
      role: "server",
      onClose: () => {
        disconnectCalled = true;
      },
    });

    await waitFor(() => {
      expect(server.wss.clients.size).toBe(1);
    });

    const peerOnServer = server.localNode.syncManager.getPeers()[0];

    for (const client of server.wss.clients) {
      client.terminate();
    }

    await waitFor(() => {
      expect(disconnectCalled).toBe(true);
    });

    expect(peerOnServer?.closed).toBe(true);
  });
});
packages/cojson-transport-ws/src/utils.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import type { AnyWebSocket } from "./types.js";

export const BUFFER_LIMIT = 100_000;
export const BUFFER_LIMIT_POLLING_INTERVAL = 10;

export function isWebSocketOpen(websocket: AnyWebSocket) {
  return websocket.readyState === 1;
}

export function hasWebSocketTooMuchBufferedData(websocket: AnyWebSocket) {
  return websocket.bufferedAmount > BUFFER_LIMIT && isWebSocketOpen(websocket);
}

export function waitForWebSocketOpen(websocket: AnyWebSocket) {
  return new Promise<void>((resolve) => {
    if (websocket.readyState === 1) {
      resolve();
    } else {
      websocket.addEventListener("open", () => resolve(), { once: true });
    }
  });
}

export async function waitForWebSocketBufferedAmount(websocket: AnyWebSocket) {
  while (hasWebSocketTooMuchBufferedData(websocket)) {
    await new Promise<void>((resolve) =>
      setTimeout(resolve, BUFFER_LIMIT_POLLING_INTERVAL),
    );
  }
}
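The helpers above are the backpressure primitives for the outgoing side of the WebSocket peer: a sender can wait for the socket to open and then pause whenever `bufferedAmount` climbs above `BUFFER_LIMIT`. A minimal sketch of how they might compose, assuming `AnyWebSocket` exposes a `send` method; the `sendAll` helper and its arguments are illustrative and not part of this diff:

```ts
import type { AnyWebSocket } from "./types.js";
import {
  waitForWebSocketBufferedAmount,
  waitForWebSocketOpen,
} from "./utils.js";

// Illustrative sender loop: open the socket first, then apply backpressure
// before each send so the socket buffer stays under BUFFER_LIMIT.
async function sendAll(websocket: AnyWebSocket, chunks: string[]) {
  await waitForWebSocketOpen(websocket);

  for (const chunk of chunks) {
    // Polls every BUFFER_LIMIT_POLLING_INTERVAL ms until the buffer drains
    // (or the socket is no longer open).
    await waitForWebSocketBufferedAmount(websocket);
    websocket.send(chunk);
  }
}
```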
@@ -1,5 +1,21 @@
# cojson

## 0.15.10

## 0.15.9

### Patch Changes

- 27b4837: Wait for the full streaming before returning values in load and subscribe
- 2776263: - Refactored the Peer incoming/outgoing channels to be synchronous
  - Changed the storage communication to work with an explicit API and removed the storage role on peers
  - Added scheduling of the incoming messages using a round-robin over the peers and a timer to do collaborative scheduling with the event loop
  - Added expectContentUntil on the content messages to optimize content syncing with servers during streaming

## 0.15.8

## 0.15.7

## 0.15.6

## 0.15.5

@@ -25,9 +25,10 @@
  },
  "type": "module",
  "license": "MIT",
  "version": "0.15.6",
  "version": "0.15.10",
  "devDependencies": {
    "@opentelemetry/sdk-metrics": "^2.0.0",
    "libsql": "^0.5.13",
    "typescript": "catalog:"
  },
  "dependencies": {
@@ -38,7 +39,6 @@
    "@scure/base": "1.2.1",
    "jazz-crypto-rs": "0.0.7",
    "neverthrow": "^7.0.1",
    "queueueue": "^4.1.2",
    "unicode-segmenter": "^0.12.0"
  },
  "scripts": {

packages/cojson/src/IncomingMessagesQueue.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
import { Counter, ValueType, metrics } from "@opentelemetry/api";
import type { PeerState } from "./PeerState.js";
import { LinkedList } from "./PriorityBasedMessageQueue.js";
import { SYNC_SCHEDULER_CONFIG } from "./config.js";
import { logger } from "./logger.js";
import type { SyncMessage } from "./sync.js";

/**
 * A queue that schedules messages across different peers using a round-robin approach.
 *
 * This class manages incoming sync messages from multiple peers, ensuring fair processing
 * by cycling through each peer's message queue in a round-robin fashion. It also implements
 * collaborative scheduling on message processing, pausing when the main thread is blocked
 * for more than 50ms.
 */
export class IncomingMessagesQueue {
  private pullCounter: Counter;
  private pushCounter: Counter;

  queues: [LinkedList<SyncMessage>, PeerState][];
  peerToQueue: WeakMap<PeerState, LinkedList<SyncMessage>>;
  currentQueue = 0;

  constructor() {
    this.pullCounter = metrics
      .getMeter("cojson")
      .createCounter(`jazz.messagequeue.incoming.pulled`, {
        description: "Number of messages pulled from the queue",
        valueType: ValueType.INT,
        unit: "1",
      });
    this.pushCounter = metrics
      .getMeter("cojson")
      .createCounter(`jazz.messagequeue.incoming.pushed`, {
        description: "Number of messages pushed to the queue",
        valueType: ValueType.INT,
        unit: "1",
      });

    /**
     * This makes sure that those metrics are generated (and emitted) as soon as the queue is created.
     * This is to avoid edge cases where one series reset is delayed, which would cause spikes or dips
     * when queried - and it also more correctly represents the actual state of the queue after a restart.
     */
    this.pullCounter.add(0, {
      peerRole: "client",
    });
    this.pushCounter.add(0, {
      peerRole: "client",
    });
    this.pullCounter.add(0, {
      peerRole: "server",
    });
    this.pushCounter.add(0, {
      peerRole: "server",
    });

    this.queues = [];
    this.peerToQueue = new WeakMap();
  }

  public push(msg: SyncMessage, peer: PeerState) {
    const queue = this.peerToQueue.get(peer);

    if (!queue) {
      const newQueue = new LinkedList<SyncMessage>();
      this.peerToQueue.set(peer, newQueue);
      this.queues.push([newQueue, peer]);
      newQueue.push(msg);
    } else {
      queue.push(msg);
    }

    this.pushCounter.add(1, {
      peerRole: peer.role,
    });
  }

  public pull() {
    const entry = this.queues[this.currentQueue];

    if (!entry) {
      return undefined;
    }

    const [queue, peer] = entry;
    const msg = queue.shift();

    if (queue.isEmpty()) {
      this.queues.splice(this.currentQueue, 1);
      this.peerToQueue.delete(peer);
    } else {
      this.currentQueue++;
    }

    if (this.currentQueue >= this.queues.length) {
      this.currentQueue = 0;
    }

    if (msg) {
      this.pullCounter.add(1, {
        peerRole: peer.role,
      });

      return { msg, peer };
    }

    return undefined;
  }

  processing = false;

  async processQueue(callback: (msg: SyncMessage, peer: PeerState) => void) {
    this.processing = true;

    let entry: { msg: SyncMessage; peer: PeerState } | undefined;
    let lastTimer = performance.now();

    while ((entry = this.pull())) {
      const { msg, peer } = entry;

      try {
        callback(msg, peer);
      } catch (err) {
        logger.error("Error processing message", { err });
      }

      const currentTimer = performance.now();

      // We check if we have blocked the main thread for too long
      // and if so, we schedule a timer task to yield to the event loop
      if (
        currentTimer - lastTimer >
        SYNC_SCHEDULER_CONFIG.INCOMING_MESSAGES_TIME_BUDGET
      ) {
        await new Promise((resolve) => setTimeout(resolve, 0));
      }
    }

    this.processing = false;
  }
}
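For context, a rough sketch of how a sync manager could drive this queue; the `handleSyncMessage` callback and the wiring are illustrative, not taken from this diff:

```ts
import { IncomingMessagesQueue } from "./IncomingMessagesQueue.js";
import type { PeerState } from "./PeerState.js";
import type { SyncMessage } from "./sync.js";

// Hypothetical message handler provided by the caller.
declare function handleSyncMessage(msg: SyncMessage, peer: PeerState): void;

// Every peer gets its own internal queue, pull() rotates across peers
// round-robin, and processQueue() yields to the event loop once the
// configured time budget is exceeded.
const incomingMessages = new IncomingMessagesQueue();

function enqueueIncomingMessage(msg: SyncMessage, peer: PeerState) {
  incomingMessages.push(msg, peer);

  // Start a drain loop only if one is not already running.
  if (!incomingMessages.processing) {
    void incomingMessages.processQueue((message, from) => {
      handleSyncMessage(message, from);
    });
  }
}
```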
@@ -1,27 +1,13 @@
import { PeerKnownStates, ReadonlyPeerKnownStates } from "./PeerKnownStates.js";
import { PriorityBasedMessageQueue } from "./PriorityBasedMessageQueue.js";
import { RawCoID, SessionID } from "./ids.js";
import { logger } from "./logger.js";
import { CO_VALUE_PRIORITY } from "./priority.js";
import { CoValueKnownState, Peer, SyncMessage } from "./sync.js";

export class PeerState {
  private queue: PriorityBasedMessageQueue;

  constructor(
    private peer: Peer,
    knownStates: ReadonlyPeerKnownStates | undefined,
  ) {
    /**
     * We set as default priority HIGH to handle all the messages without a
     * priority property as HIGH priority.
     *
     * This way we consider all the non-content messages as HIGH priority.
     */
    this.queue = new PriorityBasedMessageQueue(CO_VALUE_PRIORITY.HIGH, {
      peerRole: peer.role,
    });

    this._knownStates = knownStates?.clone() ?? new PeerKnownStates();
    this._optimisticKnownStates = knownStates?.clone() ?? new PeerKnownStates();
  }
@@ -105,75 +91,14 @@ export class PeerState {
    return this.peer.priority;
  }

  get crashOnClose() {
    return this.peer.crashOnClose;
  }

  shouldRetryUnavailableCoValues() {
    return this.peer.role === "server";
  }

  isServerOrStoragePeer() {
    return this.peer.role === "server" || this.peer.role === "storage";
  }

  private processing = false;
  public closed = false;

  async processQueue() {
    if (this.processing) {
      return;
    }

    this.processing = true;

    let msg: SyncMessage | undefined;
    while ((msg = this.queue.pull())) {
      if (this.closed) {
        break;
      }

      // Awaiting the push to send one message at a time
      // This way when the peer is "under pressure" we can enqueue all
      // the coming messages and organize them by priority
      try {
        await this.peer.outgoing.push(msg);
      } catch (e) {
        logger.error("Error sending message", {
          err: e,
          action: msg.action,
          id: msg.id,
          peerId: this.id,
          peerRole: this.role,
        });
      }
    }

    this.processing = false;
  get incoming() {
    return this.peer.incoming;
  }

  pushOutgoingMessage(msg: SyncMessage) {
    if (this.closed) {
      return;
    }

    this.queue.push(msg);

    void this.processQueue();
  }

  isProcessing() {
    return this.processing;
  }

  get incoming() {
    if (this.closed) {
      return (async function* () {
        yield "Disconnected" as const;
      })();
    }

    return this.peer.incoming;
    this.peer.outgoing.push(msg);
  }

  closeListeners = new Set<() => void>();
@@ -200,43 +125,19 @@ export class PeerState {
  }

  gracefulShutdown() {
    if (this.closed) {
      return;
    }

    logger.debug("Gracefully closing", {
      peerId: this.id,
      peerRole: this.role,
    });
    this.peer.crashOnClose = false;
    this.peer.outgoing.close();

    this.closed = true;
    this.peer.outgoing.push("Disconnected");
    this.peer.outgoing.close();
    this.peer.incoming.close();
    this.emitClose();
  }

  async processIncomingMessages(callback: (msg: SyncMessage) => void) {
    if (this.closed) {
      throw new Error("Peer is closed");
    }

    const processIncomingMessages = async () => {
      for await (const msg of this.incoming) {
        if (this.closed) {
          return;
        }

        if (msg === "Disconnected") {
          return;
        }

        if (msg === "PingTimeout") {
          logger.error("Ping timeout from peer", {
            peerId: this.id,
            peerRole: this.role,
          });
          return;
        }

        callback(msg);
      }
    };

    return processIncomingMessages();
  }
}
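After this refactor, PeerState no longer owns an outgoing priority queue or an async-iterator loop over incoming messages: sending is a plain synchronous push, and incoming messages are consumed through the channel's `onMessage`, as exercised by the websocket tests above. A rough sketch of calling code under those assumptions; `attachPeer` and `handleMessage` are illustrative names, not part of the package:

```ts
import type { PeerState } from "./PeerState.js";
import type { SyncMessage } from "./sync.js";

// Assumes the synchronous channel shape used in the tests above:
// incoming.onMessage(...) delivers SyncMessage | "Disconnected" | "PingTimeout".
function attachPeer(
  peerState: PeerState,
  handleMessage: (msg: SyncMessage) => void,
) {
  peerState.incoming.onMessage((msg) => {
    if (msg === "Disconnected" || msg === "PingTimeout") {
      // Tear the peer down on either terminal signal.
      peerState.gracefulShutdown();
      return;
    }
    handleMessage(msg);
  });

  // Outgoing messages are now pushed synchronously; priority ordering and
  // batching happen further down in the transport, not in PeerState, e.g.:
  // peerState.pushOutgoingMessage(message);
}
```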