Compare commits

..

1 Commits

Author SHA1 Message Date
Guido D'Orsi
763d28a46c chore: run e2e/BinaryCoStream tests on CI 2024-09-14 13:01:12 +02:00
2209 changed files with 48498 additions and 277968 deletions

View File

@@ -1,25 +1,24 @@
{
"$schema": "https://unpkg.com/@changesets/config@2.3.1/schema.json",
"changelog": "@changesets/cli/changelog",
"commit": false,
"linked": [],
"fixed": [
[
"cojson",
"cojson-core-wasm",
"cojson-storage-indexeddb",
"cojson-storage-sqlite",
"cojson-transport-ws",
"jazz-auth-betterauth",
"jazz-betterauth-client-plugin",
"jazz-betterauth-server-plugin",
"jazz-react-auth-betterauth",
"jazz-run",
"jazz-tools",
"community-jazz-vue"
]
],
"access": "public",
"baseBranch": "main",
"updateInternalDependencies": "minor"
"$schema": "https://unpkg.com/@changesets/config@2.3.1/schema.json",
"changelog": "@changesets/cli/changelog",
"commit": false,
"fixed": [],
"linked": [
[
"cojson",
"jazz-tools",
"jazz-browser",
"jazz-browser-media-images",
"jazz-react",
"jazz-nodejs",
"jazz-run",
"cojson-transport-ws",
"cojson-storage-indexeddb",
"cojson-storage-sqlite"
]
],
"access": "public",
"baseBranch": "main",
"updateInternalDependencies": "patch",
"ignore": []
}

View File

@@ -0,0 +1,5 @@
---
"jazz-tools": patch
---
Fix on CoMapInit to not allow null values on required refs

View File

@@ -0,0 +1,5 @@
---
"cojson": patch
---
Clean up binary stream ending logic

View File

@@ -0,0 +1,5 @@
---
"jazz-tools": patch
---
fix: handle null values for co.refs

View File

@@ -0,0 +1,5 @@
---
"jazz-tools": patch
---
Fix loadAsBlob resolving too early

View File

@@ -0,0 +1,5 @@
---
"jazz-react": patch
---
mark the auth as loading when authState is not ready

View File

@@ -0,0 +1,5 @@
---
"jazz-run": patch
---
Added sync command to start a local sync server

View File

@@ -1,5 +0,0 @@
---
"jazz-tools": patch
---
Explicit loadAs in CoList.upsertUnique to use it without loaded context

View File

@@ -1,8 +0,0 @@
---
"cojson": patch
---
Fix admin permission downgrade to writeOnly
- Allow admin to self-downgrade to writeOnly
- Prevent admin from downgrading other admins to writeOnly

View File

@@ -1,5 +0,0 @@
---
"cojson": patch
---
Skip agent resolution when skipVerify is true

View File

@@ -0,0 +1,5 @@
---
"jazz-inspector": patch
---
fix(inspector): subscribe to latent covalues instead of loading them immediately

View File

@@ -1,11 +0,0 @@
# EditorConfig is awesome: https://editorconfig.org
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2

1
.envrc
View File

@@ -1 +0,0 @@
use flake

View File

@@ -1,2 +0,0 @@
# Formatted workspace with biome
be0a09a22295cd5d2ee3ef323e2d999da8a14110

8
.github/CODEOWNERS vendored
View File

@@ -1,8 +0,0 @@
./packages @garden-co/framework
./tests @garden-co/framework
./packages/quint-ui @garden-co/ui
./homepage @garden-co/ui
./homepage/homepage/content/docs @garden-co/docs
./starters @garden-co/docs
./examples @garden-co/docs @garden-co/ui

View File

@@ -1,10 +0,0 @@
---
name: Docs request
about: Allow people to quickly report issues & improvements for the docs
title: 'Docs: '
labels: docs, requested
assignees: bensleveritt
---

View File

@@ -1,39 +0,0 @@
name: Setup Android Emulator
inputs:
api-level:
description: 'API level to use for the emulator'
required: true
default: '29'
runs:
using: "composite"
steps:
- name: Enable KVM
shell: bash
run: |
echo 'KERNEL=="kvm", GROUP="kvm", MODE="0666", OPTIONS+="static_node=kvm"' | sudo tee /etc/udev/rules.d/99-kvm4all.rules
sudo udevadm control --reload-rules
sudo udevadm trigger --name-match=kvm
- name: Gradle cache
uses: gradle/actions/setup-gradle@v4
- name: AVD cache
uses: useblacksmith/cache@v5
id: avd-cache
with:
path: |
~/.android/avd/*
~/.android/adb*
key: avd-${{ inputs.api-level }}
- name: Create AVD and Generate Snapshot for Caching
if: steps.avd-cache.outputs.cache-hit != 'true'
uses: reactivecircus/android-emulator-runner@v2
with:
api-level: ${{ inputs.api-level }}
force-avd-creation: false
emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none -no-metrics
disable-animations: false
script: echo "Generated AVD snapshot for caching."

View File

@@ -1,36 +0,0 @@
name: Get and Build Source Code
runs:
using: "composite"
steps:
- name: Enable latest corepack
shell: bash
run: |
echo "Before: corepack version => $(corepack --version || echo 'not installed')"
npm install -g corepack@latest
echo "After : corepack version => $(corepack --version)"
corepack enable
pnpm --version
- name: Install Node.js
uses: useblacksmith/setup-node@v5
with:
node-version-file: '.node-version'
cache: 'pnpm'
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- uses: useblacksmith/cache@v5
name: Setup pnpm cache
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
shell: bash
run: pnpm install --frozen-lockfile

View File

@@ -1,23 +0,0 @@
# Description
<!-- Please include a summary of the change and which issue is fixed -->
<!-- Please also include relevant motivation and context -->
<!-- Include any links to documentation like RFCs if necessary -->
<!-- Add a link to relevant preview environments or anything that would simplify visual review process -->
<!-- Supplemental screenshots and video are encouraged, but the primary description should be in text -->
## Manual testing instructions
<!-- Add any actions required to manually test the changes -->
## Tests
- [ ] Tests have been added and/or updated
- [ ] Tests have not been updated, because: <!-- Insert reason for not updating tests here -->
- [ ] I need help with writing tests
## Checklist
- [ ] I've updated the parts of the docs that are affected by the PR changes
- [ ] I've generated a changeset, if a version bump is required
- [ ] I've updated the jsDoc comments to the public APIs I've modified, or added them when missing

106
.github/workflows/build-and-deploy.yaml vendored Normal file
View File

@@ -0,0 +1,106 @@
name: Build and Deploy
on:
push:
branches: [ "main" ]
jobs:
build-examples:
runs-on: ubuntu-latest
strategy:
matrix:
example: ["chat", "pets", "todo", "inspector"]
# example: ["twit", "chat", "counter-js-auth0", "pets", "twit", "file-drop", "inspector"]
steps:
- uses: actions/checkout@v3
with:
submodules: true
- name: Enable corepack
run: corepack enable
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version-file: '.node-version'
cache: 'pnpm'
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- uses: actions/cache@v3
name: Setup pnpm cache
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: gardencmp
password: ${{ secrets.GITHUB_TOKEN }}
- name: Pnpm Build
run: |
pnpm install
pnpm turbo build;
working-directory: ./examples/${{ matrix.example }}
- name: Docker Build & Push
uses: docker/build-push-action@v4
with:
context: ./examples/${{ matrix.example }}
push: true
tags: ghcr.io/gardencmp/${{github.event.repository.name}}-example-${{ matrix.example }}:${{github.head_ref || github.ref_name}}-${{github.sha}}-${{github.run_number}}-${{github.run_attempt}}
cache-from: type=gha
cache-to: type=gha,mode=max
deploy-examples:
runs-on: ubuntu-latest
needs: build-examples
strategy:
matrix:
example: ["chat", "pets", "todo", "inspector"]
# example: ["twit", "chat", "counter-js-auth0", "pets", "twit", "file-drop", "inspector"]
steps:
- uses: actions/checkout@v3
with:
submodules: true
- uses: gacts/install-nomad@v1
- name: Tailscale
uses: tailscale/github-action@v1
with:
authkey: ${{ secrets.TAILSCALE_AUTHKEY }}
- name: Deploy on Nomad
run: |
if [ "${{github.ref_name}}" == "main" ]; then
export BRANCH_SUFFIX="";
export BRANCH_SUBDOMAIN="";
else
export BRANCH_SUFFIX=-${{github.head_ref || github.ref_name}};
export BRANCH_SUBDOMAIN=${{github.head_ref || github.ref_name}}.;
fi
export DOCKER_USER=gardencmp;
export DOCKER_PASSWORD=${{ secrets.DOCKER_PULL_PAT }};
export DOCKER_TAG=ghcr.io/gardencmp/${{github.event.repository.name}}-example-${{ matrix.example }}:${{github.head_ref || github.ref_name}}-${{github.sha}}-${{github.run_number}}-${{github.run_attempt}};
envsubst '${DOCKER_USER} ${DOCKER_PASSWORD} ${DOCKER_TAG} ${BRANCH_SUFFIX} ${BRANCH_SUBDOMAIN}' < job-template.nomad > job-instance.nomad;
cat job-instance.nomad;
NOMAD_ADDR=${{ secrets.NOMAD_ADDR }} nomad job run job-instance.nomad;
working-directory: ./examples/${{ matrix.example }}

View File

@@ -1,27 +0,0 @@
name: Code quality
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
push:
branches:
- "main"
pull_request:
jobs:
quality:
runs-on: blacksmith-2vcpu-ubuntu-2404-arm
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Biome
uses: biomejs/setup-biome@v2
with:
version: 2.1.3
- name: Run Biome
run: biome ci .

View File

@@ -1,77 +0,0 @@
name: Test `create-jazz-app` Distribution
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened]
paths:
- 'packages/create-jazz-app/**'
jobs:
test-create-jazz-app-distribution:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Build create-jazz-app
run: pnpm build
working-directory: packages/create-jazz-app
- name: Pack create-jazz-app
run: pnpm pack
working-directory: packages/create-jazz-app
- name: Create test directory
run: mkdir -p /tmp/test-create-jazz-app
- name: Initialize test package
run: |
cd /tmp/test-create-jazz-app
bun init -y
- name: Install packed create-jazz-app
run: |
cd /tmp/test-create-jazz-app
bun install ${{ github.workspace }}/packages/create-jazz-app/create-jazz-app-*.tgz
- name: Test basic functionality
run: |
cd /tmp/test-create-jazz-app
bunx create-jazz-app --help
- name: Create test project and validate catalog resolution
run: |
cd /tmp/test-create-jazz-app
mkdir test-project
cd test-project
echo -e "\n\n\n\n\n\n\n\n" | bunx create-jazz-app . --framework react --starter react-passkey-auth --package-manager bun --git false
- name: Validate no unresolved catalog references
run: |
cd /tmp/test-create-jazz-app/test-project
# Check for unresolved catalog: references in package.json
if grep -r "catalog:" package.json; then
echo "❌ Found unresolved catalog: references in generated project"
exit 1
fi
# Check for unresolved workspace: references
if grep -r "workspace:" package.json; then
echo "❌ Found unresolved workspace: references in generated project"
exit 1
fi
echo "✅ All catalog and workspace references resolved successfully"

View File

@@ -1,90 +0,0 @@
name: End-to-End Tests for React Native
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened]
paths:
- ".github/actions/android-emulator/**"
- ".github/actions/source-code/**"
- ".github/workflows/e2e-rn-test.yml"
- "examples/chat-rn-expo/**"
- "packages/**"
jobs:
e2e-tests:
runs-on: blacksmith-4vcpu-ubuntu-2204
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Create Output Directory
run: |
mkdir -p ~/output
- name: Setup JDK
uses: actions/setup-java@v4
with:
distribution: corretto
java-version: 22
cache: gradle
- name: Pnpm Build
run: pnpm turbo build --filter="./packages/*"
- name: chat-rn-expo App Pre Build
working-directory: ./examples/chat-rn-expo
run: |
pnpm expo prebuild --clean
- name: Install Maestro
run: |
curl -fsSL "https://get.maestro.mobile.dev" | bash
- name: Setup Android Emulator
id: android-emulator
uses: ./.github/actions/android-emulator/
with:
api-level: 29
- name: Test App
uses: reactivecircus/android-emulator-runner@v2
id: e2e_test
continue-on-error: true
with:
api-level: 29
force-avd-creation: false
emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none -no-metrics
disable-animations: true
working-directory: ./examples/chat-rn-expo/
# killall due to this issue: https://github.com/ReactiveCircus/android-emulator-runner/issues/385
script: ./test/e2e/run.sh && ( killall -INT crashpad_handler || true )
- name: Copy Maestro Output
if: steps.e2e_test.outcome != 'success'
run: |
cp -r ~/.maestro/tests/* ~/output
- name: Upload Output Files
if: steps.e2e_test.outcome != 'success'
uses: actions/upload-artifact@v4
with:
name: e2e-test-output
path: ~/output/*
retention-days: 5
- name: Exit with Test Result
if: always()
run: |
if [ "${{ steps.e2e_test.outcome }}" != "success" ]; then
exit 1
fi

View File

@@ -1,34 +0,0 @@
name: Jazz Run Tests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
push:
branches: ["main"]
pull_request:
types: [opened, synchronize, reopened]
jobs:
test:
runs-on: blacksmith-4vcpu-ubuntu-2404
timeout-minutes: 5
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Build jazz-run
run: pnpm exec turbo build && chmod +x dist/index.js;
working-directory: ./packages/jazz-run
- name: Run create account
run: ./dist/index.js account create --name "Jazz Run CI test"
working-directory: ./packages/jazz-run

21
.github/workflows/monorepo-linting.yml vendored Normal file
View File

@@ -0,0 +1,21 @@
name: Monorepo linting
on:
pull_request:
types: [opened, synchronize, reopened]
jobs:
monorepo-linting:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version-file: '.node-version'
- name: Run sherif
run: npx sherif@1.0.0

View File

@@ -1,11 +1,5 @@
name: Playwright Tests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
push:
branches: ["main"]
@@ -15,145 +9,56 @@ on:
jobs:
test:
timeout-minutes: 60
runs-on: blacksmith-4vcpu-ubuntu-2404
continue-on-error: true
runs-on: ubuntu-latest
strategy:
matrix:
shard: ["1/2", "2/2"]
project: ["e2e/BinaryCoStream", "examples/pets"]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
submodules: true
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Enable corepack
run: corepack enable
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version-file: '.node-version'
cache: 'pnpm'
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- uses: actions/cache@v3
name: Setup pnpm cache
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Pnpm Build
run: pnpm turbo build;
working-directory: ./${{ matrix.project }}
- name: Install Playwright Browsers
run: pnpm exec playwright install
run: pnpm exec playwright install --with-deps
working-directory: ./${{ matrix.project }}
- name: Run Playwright tests for shard ${{ matrix.shard }}
run: |
# Parse shard information (e.g., "1/2" -> shard_num=1, total_shards=2)
IFS='/' read -r shard_num total_shards <<< "${{ matrix.shard }}"
shard_index=$((shard_num - 1)) # Convert to 0-based index
# Debug: Print parsed values
echo "Parsed shard_num: $shard_num"
echo "Parsed total_shards: $total_shards"
echo "Calculated shard_index: $shard_index"
# Define all projects to test
all_projects=(
"tests/e2e"
"examples/chat"
"examples/chat-svelte"
"examples/community-clerk-vue"
"examples/clerk"
"examples/betterauth"
"examples/file-share-svelte"
"examples/form"
"examples/inspector"
"examples/music-player"
"examples/organization"
"examples/server-worker-http"
"starters/react-passkey-auth"
"starters/svelte-passkey-auth"
"tests/jazz-svelte"
)
# Calculate which projects this shard should run
shard_projects=()
for i in "${!all_projects[@]}"; do
if [ $((i % total_shards)) -eq $shard_index ]; then
shard_projects+=("${all_projects[i]}")
fi
done
# Track project results
overall_exit_code=0
failed_projects=()
passed_projects=()
echo "=== Running tests for shard ${{ matrix.shard }} ==="
echo "Projects in this shard:"
printf '%s\n' "${shard_projects[@]}"
echo
# Run tests for each project
for project in "${shard_projects[@]}"; do
echo "=== Testing project: $project ==="
# Check if project directory exists
if [ ! -d "$project" ]; then
echo "❌ FAILED: Project directory $project does not exist"
failed_projects+=("$project (directory not found)")
overall_exit_code=1
continue
fi
# Check if project has package.json
if [ ! -f "$project/package.json" ]; then
echo "❌ FAILED: No package.json found in $project"
failed_projects+=("$project (no package.json)")
overall_exit_code=1
continue
fi
# Build the project
echo "🔨 Building $project..."
cd "$project"
if [ -f .env.test ]; then
cp .env.test .env
fi
if ! pnpm turbo build; then
echo "❌ BUILD FAILED: $project"
failed_projects+=("$project (build failed)")
overall_exit_code=1
cd - > /dev/null
continue
fi
# Run Playwright tests
echo "🧪 Running Playwright tests for $project..."
if ! pnpm exec playwright test; then
echo "❌ TESTS FAILED: $project"
failed_projects+=("$project (tests failed)")
overall_exit_code=1
else
echo "✅ TESTS PASSED: $project"
passed_projects+=("$project")
fi
cd - > /dev/null
echo "=== Finished testing $project ==="
echo
done
# Print summary report
echo "=========================================="
echo "📊 TEST SUMMARY FOR SHARD ${{ matrix.shard }}"
echo "=========================================="
if [ ${#passed_projects[@]} -gt 0 ]; then
echo "✅ PASSED (${#passed_projects[@]}):"
printf ' - %s\n' "${passed_projects[@]}"
echo
fi
if [ ${#failed_projects[@]} -gt 0 ]; then
echo "❌ FAILED (${#failed_projects[@]}):"
printf ' - %s\n' "${failed_projects[@]}"
echo
fi
echo "Total projects in shard: ${#shard_projects[@]}"
echo "Passed: ${#passed_projects[@]}"
echo "Failed: ${#failed_projects[@]}"
echo "=========================================="
# Exit with overall status
exit $overall_exit_code
- name: Run Playwright tests
run: pnpm exec playwright test
working-directory: ./${{ matrix.project }}
- uses: actions/upload-artifact@v4
if: failure()
with:
name: ${{ hashFiles(format('{0}/package.json', matrix.project)) }}-playwright-report
path: ./${{ matrix.project }}/playwright-report/
retention-days: 30

View File

@@ -1,109 +0,0 @@
name: Pre-Publish tagged Pull Requests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened, labeled]
jobs:
pre-release:
if: contains(github.event.pull_request.labels.*.name, 'pre-release')
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Pnpm Build
run: pnpm turbo build --filter="./packages/*"
- name: Pre publish
run: pnpm exec pkg-pr-new publish --json output.json --comment=off "./packages/*"
- name: Post or update comment
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const fs = require('fs');
const output = JSON.parse(fs.readFileSync('output.json', 'utf8'));
const packages = output.packages
.map((p) => `- ${p.name}: ${p.url}`)
.join('\n');
const sha =
context.event_name === 'pull_request'
? context.payload.pull_request.head.sha
: context.payload.after;
const resolutions = Object.fromEntries(
output.packages.map((p) => [p.name, p.url])
);
const commitUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/commit/${sha}`;
const body = `## Jazz pre-release
### Packages:
\`\`\`json
${JSON.stringify(resolutions, null, 4)}
\`\`\`
[View Commit](${commitUrl})`;
async function logPublishInfo() {
console.log('\n' + '='.repeat(50));
console.log('Publish Information');
console.log('='.repeat(50));
console.log('\nPublished Packages:');
console.log(output.packages);
console.log('\nTemplates:');
console.log(templates);
console.log(`\nCommit URL: ${commitUrl}`);
console.log('\n' + '='.repeat(50));
}
if (context.eventName === 'pull_request') {
if (context.issue.number) {
await github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: body,
});
}
} else if (context.eventName === 'push') {
const pullRequests = await github.rest.pulls.list({
owner: context.repo.owner,
repo: context.repo.repo,
state: 'open',
head: `${context.repo.owner}:${context.ref.replace(
'refs/heads/',
''
)}`,
});
if (pullRequests.data.length > 0) {
await github.rest.issues.createComment({
issue_number: pullRequests.data[0].number,
owner: context.repo.owner,
repo: context.repo.repo,
body: body,
});
} else {
console.log(
'No open pull request found for this push. Logging publish information to console:'
);
await logPublishInfo();
}
}

View File

@@ -1,46 +0,0 @@
name: Release
on:
push:
branches:
- main
workflow_dispatch:
inputs:
debug_enabled:
type: boolean
description: "Run tmate session for debugging"
required: false
default: false
concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
release:
name: Release
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Build packages
run: pnpm exec turbo run build --filter='./packages/*'
- name: Create Release Pull Request or Publish to npm
id: changesets
uses: changesets/action@v1
with:
version: pnpm changeset-version
publish: pnpm release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# Enable tmate debugging only if the workflow is manually triggered, debug_enabled is true, and the workflow failed
- name: Setup tmate session for debugging
if: ${{ failure() && github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}
uses: mxschmitt/action-tmate@v3
with:
timeout-minutes: 15

View File

@@ -1,11 +1,5 @@
name: Unit Tests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened]
@@ -15,20 +9,39 @@ on:
jobs:
unit-tests:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Enable corepack
run: corepack enable
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version-file: '.node-version'
cache: 'pnpm'
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- uses: actions/cache@v3
name: Setup pnpm cache
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Pnpm Build
run: pnpm turbo build --filter="./packages/*"
- name: Install Playwright Browsers
run: pnpm exec playwright install
run: pnpm turbo build;
- name: Unit Tests
run: pnpm test:ci
run: pnpm test

32
.gitignore vendored
View File

@@ -3,34 +3,4 @@ yarn-error.log
lerna-debug.log
docsTmp
.DS_Store
.turbo
coverage
.direnv
# Typescript
**/*.tsbuildinfo
# Next.js
**/.next
# Vite output
**/dist
__screenshots__
# Playwright
test-results
# Java
.java-version
.husky
.vscode/*
.idea/*
.svelte-kit
.cursorrules
.windsurfrules
playwright-report
.turbo

View File

@@ -1 +1 @@
22
20

1
.npmrc
View File

@@ -1 +0,0 @@
node-linker=hoisted

1
.nvmrc
View File

@@ -1 +0,0 @@
22

View File

@@ -1,131 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
- Focusing on what is best not just for us as individuals, but for the overall
community
Examples of unacceptable behavior include:
- The use of sexualized language or imagery, and sexual attention or advances of
any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email address,
without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official email address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to [the community leaders responsible for enforcement](mailto:hello@garden.co).
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series of
actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the
community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations

View File

@@ -1,99 +0,0 @@
# Contribution Guide
Thank you for considering contributing to Jazz! Jazz is an open-source framework for building local-first apps. We value your time and effort and are excited to collaborate with you. This guide will help you get started with contributing.
## How to Contribute
### 1. Reporting Bugs
If you find a bug, please [open an issue with as much detail as possible](https://github.com/garden-co/jazz/issues). Include:
- A clear and descriptive title.
- Steps to reproduce the issue.
- What you expected to happen.
- What actually happened.
### 2. Suggesting Enhancements
We welcome all ideas! If you have suggestions, feel free to open an issue marked with the "enhancement" label. Please provide context on why the enhancement would be beneficial and how it could be implemented.
### 3. Pull Requests
1. **Fork the repository** and create your feature branch (see [GitHub's guide on forking a repository](https://docs.github.com/en/get-started/quickstart/fork-a-repo) if you're unfamiliar with the process):
2. **Make your changes**, ensuring that you follow our coding standards (`pnpm format` (prettier) and `pnpm lint` (eslint) will automatically let you know there are issues).
3. **Commit your changes** with a descriptive commit message.
4. **Push to your fork** and submit a pull request.
5. **Describe your pull request**, explaining the problem it solves or the enhancement it adds.
### 4. Code Style Guidelines
- We use [Prettier](https://prettier.io/) for formatting. Please ensure your code is formatted before submitting.
- Write descriptive comments where necessary.
### 5. Local Setup
You'll need Node.js 22.x (we're working on support for 23.x) and pnpm 9.x installed. If you're using Nix, run `nix develop` to get a shell with the correct versions of everything installed.
1. **Clone the repository**:
```bash
git clone https://github.com/garden-co/jazz.git
```
2. **Install dependencies**:
```bash
pnpm install
```
3. **Install homepage dependencies**:
```bash
cd homepage && pnpm install
```
4. **Go back to the project root**:
```bash
cd ..
```
5. **Build the packages**:
```bash
pnpm build:packages
```
6. **Run tests** to verify everything is working:
```bash
pnpm test
```
### 6. Testing
Please write tests for any new features or bug fixes. We use Vitest for unit tests, and Playwright for e2e tests. Make sure all tests pass before submitting a pull request.
```bash
pnpm test
```
NB: You'll need to run `pnpm exec playwright install` to install the Playwright browsers before first run.
### 7. Communication
- If you're unsure about anything, feel free to ask questions by opening a discussion, reaching out via issues, or on our [Discord](https://discord.gg/utDMjHYg42).
- Be respectful and constructive, this is a welcoming community for everyone.
- Please be mindful of GitHub's [Community Guidelines](https://docs.github.com/en/site-policy/github-terms/github-community-guidelines), which include being kind, avoiding disruptive behavior, and respecting others.
## Code of Conduct
Please read and adhere to our [Code of Conduct](./CODE_OF_CONDUCT.md) to ensure a positive experience for all contributors.
---
Thank you again for your interest in contributing to Jazz. Your help makes this project better for everyone!
If you have any questions, don't hesitate to reach out. Let's make something great together!

View File

@@ -1,4 +1,4 @@
Copyright 2025, Garden Computing, Inc.
Copyright 2024, Garden Computing, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -16,4 +16,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
SOFTWARE.

View File

@@ -1,20 +1,12 @@
# Jazz - Build local-first apps
# Jazz - Instant sync
**Jazz is an open-source toolkit for building apps with *distributed state.***
## Getting started
We recommend reading the [homepage](https://jazz.tools) and [docs](https://jazz.tools/docs) to get an overview of what Jazz is and how it works.
If you're interested in contributing, please read [CONTRIBUTING.md](./CONTRIBUTING.md).
For community and support, please join our [Discord](https://discord.gg/utDMjHYg42).
---
- Homepage: [jazz.tools](https://jazz.tools)
- Docs: [jazz.tools/docs](https://jazz.tools/docs)
- Community & support: [Discord](https://discord.gg/utDMjHYg42)
- Updates: [X](https://x.com/jazz_tools) & [Email](https://garden.co/news)
- Updates: [Twitter](https://twitter.com/jazz_tools) & [Email](https://gcmp.io/news)
Copyright 2025 &mdash; Garden Computing, Inc.
Copyright 2024 &mdash; Garden Computing, Inc.

View File

@@ -1,171 +0,0 @@
import { describe, bench } from "vitest";
import * as tools from "jazz-tools";
import * as toolsLatest from "jazz-tools-latest";
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
import { WasmCrypto as WasmCryptoLatest } from "cojson-latest/crypto/WasmCrypto";
import { PureJSCrypto } from "cojson/crypto/PureJSCrypto";
import { PureJSCrypto as PureJSCryptoLatest } from "cojson-latest/crypto/PureJSCrypto";
// Fixture data reused by every benchmark iteration below.
const sampleReactions = ["👍", "❤️", "😄", "🎉"];
const sampleHiddenIn = ["user1", "user2", "user3"];
// Define the schemas based on the provided Message schema
// Builds the benchmark fixtures (schemas, Group class, account) for one
// jazz-tools flavor. The parameters are typed against the pinned 0.17.9
// release ("latest"), so call sites that pass the current workspace build
// need @ts-expect-error.
async function createSchema(
tools: typeof toolsLatest,
WasmCrypto: typeof WasmCryptoLatest,
) {
// Optional embed attached to a message (link-preview style fields).
const Embed = tools.co.map({
url: tools.z.string(),
title: tools.z.string().optional(),
description: tools.z.string().optional(),
image: tools.z.string().optional(),
});
// Message CoMap schema the benchmarks create/import instances of.
const Message = tools.co.map({
content: tools.z.string(),
createdAt: tools.z.date(),
updatedAt: tools.z.date(),
hiddenIn: tools.co.list(tools.z.string()),
replyTo: tools.z.string().optional(),
reactions: tools.co.list(tools.z.string()),
softDeleted: tools.z.boolean().optional(),
embeds: tools.co.optional(tools.co.list(Embed)),
author: tools.z.string().optional(),
threadId: tools.z.string().optional(),
});
// Fresh account context for this flavor, using the given crypto provider.
const ctx = await tools.createJazzContextForNewAccount({
creationProps: {
name: "Test Account",
},
// @ts-expect-error
crypto: await WasmCrypto.create(),
});
return {
Message,
sampleReactions,
sampleHiddenIn,
Group: tools.Group,
account: ctx.account,
};
}
// Toggle to benchmark the pure-JS crypto provider instead of the WASM one.
const PUREJS = false;
// @ts-expect-error
const schema = await createSchema(tools, PUREJS ? PureJSCrypto : WasmCrypto);
const schemaLatest = await createSchema(
toolsLatest,
// @ts-expect-error
PUREJS ? PureJSCryptoLatest : WasmCryptoLatest,
);
// One public 1 KiB message used as the fixture for the import benchmarks.
const message = schema.Message.create(
{
content: "A".repeat(1024),
createdAt: new Date(),
updatedAt: new Date(),
hiddenIn: sampleHiddenIn,
reactions: sampleReactions,
author: "user123",
},
schema.Group.create(schema.account).makePublic(),
);
// Export the message's content pieces so the import benchmarks can replay them.
const content = await tools.exportCoValue(schema.Message, message.id, {
// @ts-expect-error
loadAs: schema.account,
});
// Import once into each account, then delete the CoValue again — presumably a
// warm-up so the timed iterations below all start from the same cold state.
tools.importContentPieces(content ?? [], schema.account as any);
toolsLatest.importContentPieces(content ?? [], schemaLatest.account);
schema.account._raw.core.node.internalDeleteCoValue(message.id as any);
schemaLatest.account._raw.core.node.internalDeleteCoValue(message.id as any);
describe("Message.create", () => {
bench(
"current version",
() => {
schema.Message.create(
{
content: "A".repeat(1024),
createdAt: new Date(),
updatedAt: new Date(),
hiddenIn: sampleHiddenIn,
reactions: sampleReactions,
author: "user123",
},
schema.Group.create(schema.account),
);
},
{ iterations: 1000 },
);
bench(
"Jazz 0.17.9",
() => {
schemaLatest.Message.create(
{
content: "A".repeat(1024),
createdAt: new Date(),
updatedAt: new Date(),
hiddenIn: sampleHiddenIn,
reactions: sampleReactions,
author: "user123",
},
schemaLatest.Group.create(schemaLatest.account),
);
},
{ iterations: 1000 },
);
});
describe("Message import", () => {
bench(
"current version",
() => {
tools.importContentPieces(content ?? [], schema.account as any);
schema.account._raw.core.node.internalDeleteCoValue(message.id as any);
},
{ iterations: 5000 },
);
bench(
"Jazz 0.17.9",
() => {
toolsLatest.importContentPieces(content ?? [], schemaLatest.account);
schemaLatest.account._raw.core.node.internalDeleteCoValue(
message.id as any,
);
},
{ iterations: 5000 },
);
});
describe("import+ decrypt", () => {
bench(
"current version",
() => {
tools.importContentPieces(content ?? [], schema.account as any);
const node = schema.account._raw.core.node;
node.expectCoValueLoaded(message.id as any).getCurrentContent();
node.internalDeleteCoValue(message.id as any);
},
{ iterations: 5000 },
);
bench(
"Jazz 0.17.9",
() => {
toolsLatest.importContentPieces(content ?? [], schemaLatest.account);
const node = schemaLatest.account._raw.core.node;
node.expectCoValueLoaded(message.id as any).getCurrentContent();
node.internalDeleteCoValue(message.id as any);
},
{ iterations: 5000 },
);
});

View File

@@ -1,14 +0,0 @@
{
"name": "jazz-tools-benchmark",
"private": true,
"type": "module",
"dependencies": {
"cojson": "workspace:*",
"jazz-tools": "workspace:*",
"cojson-latest": "npm:cojson@0.17.9",
"jazz-tools-latest": "npm:jazz-tools@0.17.9"
},
"scripts": {
"bench": "vitest bench"
}
}

View File

@@ -1,7 +0,0 @@
import { defineProject } from "vitest/config";
// Vitest project configuration for the benchmark package.
const benchProject = defineProject({
  test: {
    // Name shown for this project when run from the monorepo-wide vitest.
    name: "bench",
  },
});

export default benchProject;

View File

@@ -1,93 +0,0 @@
{
"$schema": "https://biomejs.dev/schemas/2.1.3/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",
"useIgnoreFile": true
},
"files": {
"ignoreUnknown": false,
"includes": [
"**",
"!crates/**",
"!**/jazz-tools.json",
"!**/ios/**",
"!**/android/**",
"!**/tests/jazz-svelte/src/**",
"!**/examples/**/*svelte*/**",
"!**/starters/**/*svelte*/**",
"!**/examples/server-worker-inbox/src/routeTree.gen.ts",
"!**/homepage/homepage/**",
"!**/package.json",
"!**/*svelte*/**"
]
},
"formatter": {
"enabled": true,
"indentStyle": "space"
},
"assist": { "actions": { "source": { "organizeImports": "off" } } },
"linter": {
"enabled": false,
"rules": {
"recommended": true,
"correctness": {
"useExhaustiveDependencies": "off",
"useImportExtensions": {
"level": "error",
"options": {
"forceJsExtensions": true
}
}
}
}
},
"overrides": [
{
"includes": ["packages/community-jazz-vue/src/**"],
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
}
},
{
"includes": ["**/packages/**/src/**"],
"linter": {
"enabled": true,
"rules": {
"recommended": false
}
}
},
{
"includes": [
"**/packages/cojson/src/storage/**/*/**",
"**/cojson-transport-ws/**"
],
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
}
},
{
"includes": ["**/tests/**"],
"linter": {
"rules": {
"correctness": {
"useImportExtensions": "off"
},
"style": {
"noNonNullAssertion": "off"
},
"suspicious": {
"noExplicitAny": "off"
}
}
}
}
]
}

8
crates/.gitignore vendored
View File

@@ -1,8 +0,0 @@
# Rust
/target
# Test artifacts
lzy/compressed_66k.lzy
# OS generated files
.DS_Store

1164
crates/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +0,0 @@
[workspace]
resolver = "2"
members = [
"lzy",
"cojson-core",
"cojson-core-wasm",
]

View File

@@ -1,3 +0,0 @@
# cojson-core-wasm
## 0.17.10

View File

@@ -1,29 +0,0 @@
[package]
name = "cojson-core-wasm"
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
cojson-core = { path = "../cojson-core" }
wasm-bindgen = "0.2"
console_error_panic_hook = { version = "0.1.7", optional = true }
ed25519-dalek = { version = "2.2.0", default-features = false, features = ["rand_core"] }
serde_json = "1.0"
serde-wasm-bindgen = "0.6"
serde = { version = "1.0", features = ["derive"] }
js-sys = "0.3"
getrandom = { version = "0.2", features = ["js"] }
thiserror = "1.0"
hex = "0.4"
blake3 = "1.5"
x25519-dalek = { version = "2.0", features = ["getrandom", "static_secrets"] }
crypto_secretbox = { version = "0.1.1", features = ["getrandom"] }
salsa20 = "0.10.2"
rand = "0.8"
bs58 = "0.5"
[features]
default = ["console_error_panic_hook"]

View File

@@ -1,26 +0,0 @@
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
mkdirSync("./public", { recursive: true });
const wasm = readFileSync("./pkg/cojson_core_wasm_bg.wasm");
writeFileSync(
"./public/cojson_core_wasm.wasm.js",
`export const data = "data:application/wasm;base64,${wasm.toString("base64")}";`,
);
writeFileSync(
"./public/cojson_core_wasm.wasm.d.ts",
"export const data: string;",
);
const glueJs = readFileSync("./pkg/cojson_core_wasm.js", "utf8").replace(
"module_or_path = new URL('cojson_core_wasm_bg.wasm', import.meta.url);",
"throw new Error();",
);
writeFileSync("./public/cojson_core_wasm.js", glueJs);
writeFileSync(
"./public/cojson_core_wasm.d.ts",
readFileSync("./pkg/cojson_core_wasm.d.ts", "utf8"),
);

View File

@@ -1,3 +0,0 @@
export * from "./public/cojson_core_wasm.js";
export async function initialize(): Promise<void>;

View File

@@ -1,8 +0,0 @@
export * from "./public/cojson_core_wasm.js";
import __wbg_init from "./public/cojson_core_wasm.js";
import { data } from "./public/cojson_core_wasm.wasm.js";
// Instantiates the wasm module from the inlined base64 data URL produced by
// build.js, instead of letting the glue fetch a .wasm asset.
export async function initialize() {
  const initOptions = { module_or_path: data };
  return __wbg_init(initOptions);
}

View File

@@ -1,22 +0,0 @@
{
"name": "cojson-core-wasm",
"type": "module",
"version": "0.17.10",
"files": [
"public/cojson_core_wasm.js",
"public/cojson_core_wasm.d.ts",
"public/cojson_core_wasm.wasm.js",
"public/cojson_core_wasm.wasm.d.ts",
"index.js",
"index.d.ts"
],
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"build:wasm": "wasm-pack build --release --target web && node build.js",
"build:dev": "wasm-pack build --dev --target web && node build.js"
},
"devDependencies": {
"wasm-pack": "^0.13.1"
}
}

View File

@@ -1,291 +0,0 @@
/* tslint:disable */
/* eslint-disable */
/**
* WASM-exposed function for XSalsa20 encryption without authentication.
* - `key`: 32-byte key for encryption
* - `nonce_material`: Raw bytes used to generate a 24-byte nonce via BLAKE3
* - `plaintext`: Raw bytes to encrypt
* Returns the encrypted bytes or throws a JsError if encryption fails.
* Note: This function does not provide authentication. Use encrypt_xsalsa20_poly1305 for authenticated encryption.
*/
export function encrypt_xsalsa20(key: Uint8Array, nonce_material: Uint8Array, plaintext: Uint8Array): Uint8Array;
/**
* WASM-exposed function for XSalsa20 decryption without authentication.
* - `key`: 32-byte key for decryption (must match encryption key)
* - `nonce_material`: Raw bytes used to generate a 24-byte nonce (must match encryption)
* - `ciphertext`: Encrypted bytes to decrypt
* Returns the decrypted bytes or throws a JsError if decryption fails.
* Note: This function does not provide authentication. Use decrypt_xsalsa20_poly1305 for authenticated decryption.
*/
export function decrypt_xsalsa20(key: Uint8Array, nonce_material: Uint8Array, ciphertext: Uint8Array): Uint8Array;
/**
* Generate a new Ed25519 signing key using secure random number generation.
* Returns 32 bytes of raw key material suitable for use with other Ed25519 functions.
*/
export function new_ed25519_signing_key(): Uint8Array;
/**
* WASM-exposed function to derive an Ed25519 verifying key from a signing key.
* - `signing_key`: 32 bytes of signing key material
* Returns 32 bytes of verifying key material or throws JsError if key is invalid.
*/
export function ed25519_verifying_key(signing_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to sign a message using Ed25519.
* - `signing_key`: 32 bytes of signing key material
* - `message`: Raw bytes to sign
* Returns 64 bytes of signature material or throws JsError if signing fails.
*/
export function ed25519_sign(signing_key: Uint8Array, message: Uint8Array): Uint8Array;
/**
* WASM-exposed function to verify an Ed25519 signature.
* - `verifying_key`: 32 bytes of verifying key material
* - `message`: Raw bytes that were signed
* - `signature`: 64 bytes of signature material
* Returns true if signature is valid, false otherwise, or throws JsError if verification fails.
*/
export function ed25519_verify(verifying_key: Uint8Array, message: Uint8Array, signature: Uint8Array): boolean;
/**
* WASM-exposed function to validate and copy Ed25519 signing key bytes.
* - `bytes`: 32 bytes of signing key material to validate
* Returns the same 32 bytes if valid or throws JsError if invalid.
*/
export function ed25519_signing_key_from_bytes(bytes: Uint8Array): Uint8Array;
/**
* WASM-exposed function to derive the public key from an Ed25519 signing key.
* - `signing_key`: 32 bytes of signing key material
* Returns 32 bytes of public key material or throws JsError if key is invalid.
*/
export function ed25519_signing_key_to_public(signing_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to sign a message with an Ed25519 signing key.
* - `signing_key`: 32 bytes of signing key material
* - `message`: Raw bytes to sign
* Returns 64 bytes of signature material or throws JsError if signing fails.
*/
export function ed25519_signing_key_sign(signing_key: Uint8Array, message: Uint8Array): Uint8Array;
/**
* WASM-exposed function to validate and copy Ed25519 verifying key bytes.
* - `bytes`: 32 bytes of verifying key material to validate
* Returns the same 32 bytes if valid or throws JsError if invalid.
*/
export function ed25519_verifying_key_from_bytes(bytes: Uint8Array): Uint8Array;
/**
* WASM-exposed function to validate and copy Ed25519 signature bytes.
* - `bytes`: 64 bytes of signature material to validate
* Returns the same 64 bytes if valid or throws JsError if invalid.
*/
export function ed25519_signature_from_bytes(bytes: Uint8Array): Uint8Array;
/**
* WASM-exposed function to sign a message using Ed25519.
* - `message`: Raw bytes to sign
* - `secret`: Raw Ed25519 signing key bytes
* Returns base58-encoded signature with "signature_z" prefix or throws JsError if signing fails.
*/
export function sign(message: Uint8Array, secret: Uint8Array): string;
/**
* WASM-exposed function to verify an Ed25519 signature.
* - `signature`: Raw signature bytes
* - `message`: Raw bytes that were signed
* - `id`: Raw Ed25519 verifying key bytes
* Returns true if signature is valid, false otherwise, or throws JsError if verification fails.
*/
export function verify(signature: Uint8Array, message: Uint8Array, id: Uint8Array): boolean;
/**
* WASM-exposed function to derive a signer ID from a signing key.
* - `secret`: Raw Ed25519 signing key bytes
* Returns base58-encoded verifying key with "signer_z" prefix or throws JsError if derivation fails.
*/
export function get_signer_id(secret: Uint8Array): string;
/**
* Generate a 24-byte nonce from input material using BLAKE3.
* - `nonce_material`: Raw bytes to derive the nonce from
* Returns 24 bytes suitable for use as a nonce in cryptographic operations.
* This function is deterministic - the same input will produce the same nonce.
*/
export function generate_nonce(nonce_material: Uint8Array): Uint8Array;
/**
* Hash data once using BLAKE3.
* - `data`: Raw bytes to hash
* Returns 32 bytes of hash output.
* This is the simplest way to compute a BLAKE3 hash of a single piece of data.
*/
export function blake3_hash_once(data: Uint8Array): Uint8Array;
/**
* Hash data once using BLAKE3 with a context prefix.
* - `data`: Raw bytes to hash
* - `context`: Context bytes to prefix to the data
* Returns 32 bytes of hash output.
* This is useful for domain separation - the same data hashed with different contexts will produce different outputs.
*/
export function blake3_hash_once_with_context(data: Uint8Array, context: Uint8Array): Uint8Array;
/**
* Get an empty BLAKE3 state for incremental hashing.
* Returns a new Blake3Hasher instance for incremental hashing.
*/
export function blake3_empty_state(): Blake3Hasher;
/**
* Update a BLAKE3 state with new data for incremental hashing.
* - `state`: Current Blake3Hasher instance
* - `data`: New data to incorporate into the hash
* Returns the updated Blake3Hasher.
*/
export function blake3_update_state(state: Blake3Hasher, data: Uint8Array): void;
/**
* Get the final hash from a BLAKE3 state.
* - `state`: The Blake3Hasher to finalize
* Returns 32 bytes of hash output.
* This finalizes an incremental hashing operation.
*/
export function blake3_digest_for_state(state: Blake3Hasher): Uint8Array;
/**
* Generate a new X25519 private key using secure random number generation.
* Returns 32 bytes of raw key material suitable for use with other X25519 functions.
* This key can be reused for multiple Diffie-Hellman exchanges.
*/
export function new_x25519_private_key(): Uint8Array;
/**
* WASM-exposed function to derive an X25519 public key from a private key.
* - `private_key`: 32 bytes of private key material
* Returns 32 bytes of public key material or throws JsError if key is invalid.
*/
export function x25519_public_key(private_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to perform X25519 Diffie-Hellman key exchange.
* - `private_key`: 32 bytes of private key material
* - `public_key`: 32 bytes of public key material
* Returns 32 bytes of shared secret material or throws JsError if key exchange fails.
*/
export function x25519_diffie_hellman(private_key: Uint8Array, public_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to derive a sealer ID from a sealer secret.
* - `secret`: Raw bytes of the sealer secret
* Returns a base58-encoded sealer ID with "sealer_z" prefix or throws JsError if derivation fails.
*/
export function get_sealer_id(secret: Uint8Array): string;
/**
* WASM-exposed function for sealing a message using X25519 + XSalsa20-Poly1305.
* Provides authenticated encryption with perfect forward secrecy.
* - `message`: Raw bytes to seal
* - `sender_secret`: Base58-encoded sender's private key with "sealerSecret_z" prefix
* - `recipient_id`: Base58-encoded recipient's public key with "sealer_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce
* Returns sealed bytes or throws JsError if sealing fails.
*/
export function seal(message: Uint8Array, sender_secret: string, recipient_id: string, nonce_material: Uint8Array): Uint8Array;
/**
* WASM-exposed function for unsealing a message using X25519 + XSalsa20-Poly1305.
* Provides authenticated decryption with perfect forward secrecy.
* - `sealed_message`: The sealed bytes to decrypt
* - `recipient_secret`: Base58-encoded recipient's private key with "sealerSecret_z" prefix
* - `sender_id`: Base58-encoded sender's public key with "sealer_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce (must match sealing)
* Returns unsealed bytes or throws JsError if unsealing fails.
*/
export function unseal(sealed_message: Uint8Array, recipient_secret: string, sender_id: string, nonce_material: Uint8Array): Uint8Array;
/**
* WASM-exposed function to encrypt bytes with a key secret and nonce material.
* - `value`: The raw bytes to encrypt
* - `key_secret`: A base58-encoded key secret with "keySecret_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce
* Returns the encrypted bytes or throws a JsError if encryption fails.
*/
export function encrypt(value: Uint8Array, key_secret: string, nonce_material: Uint8Array): Uint8Array;
/**
* WASM-exposed function to decrypt bytes with a key secret and nonce material.
* - `ciphertext`: The encrypted bytes to decrypt
* - `key_secret`: A base58-encoded key secret with "keySecret_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce (must match encryption)
* Returns the decrypted bytes or throws a JsError if decryption fails.
*/
export function decrypt(ciphertext: Uint8Array, key_secret: string, nonce_material: Uint8Array): Uint8Array;
export class Blake3Hasher {
free(): void;
constructor();
update(data: Uint8Array): void;
finalize(): Uint8Array;
clone(): Blake3Hasher;
}
export class SessionLog {
free(): void;
constructor(co_id: string, session_id: string, signer_id?: string | null);
clone(): SessionLog;
tryAdd(transactions_json: string[], new_signature_str: string, skip_verify: boolean): void;
addNewPrivateTransaction(changes_json: string, signer_secret: string, encryption_key: string, key_id: string, made_at: number): string;
addNewTrustingTransaction(changes_json: string, signer_secret: string, made_at: number): string;
decryptNextTransactionChangesJson(tx_index: number, encryption_key: string): string;
}
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
export interface InitOutput {
readonly memory: WebAssembly.Memory;
readonly decrypt_xsalsa20: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly encrypt_xsalsa20: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly __wbg_sessionlog_free: (a: number, b: number) => void;
readonly sessionlog_new: (a: number, b: number, c: number, d: number, e: number, f: number) => number;
readonly sessionlog_clone: (a: number) => number;
readonly sessionlog_tryAdd: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number];
readonly sessionlog_addNewPrivateTransaction: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => [number, number, number, number];
readonly sessionlog_addNewTrustingTransaction: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly sessionlog_decryptNextTransactionChangesJson: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly new_ed25519_signing_key: () => [number, number];
readonly ed25519_sign: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly ed25519_verify: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number];
readonly ed25519_signing_key_from_bytes: (a: number, b: number) => [number, number, number, number];
readonly ed25519_signing_key_to_public: (a: number, b: number) => [number, number, number, number];
readonly ed25519_verifying_key_from_bytes: (a: number, b: number) => [number, number, number, number];
readonly ed25519_signature_from_bytes: (a: number, b: number) => [number, number, number, number];
readonly ed25519_verifying_key: (a: number, b: number) => [number, number, number, number];
readonly ed25519_signing_key_sign: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly sign: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly verify: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number];
readonly get_signer_id: (a: number, b: number) => [number, number, number, number];
readonly generate_nonce: (a: number, b: number) => [number, number];
readonly blake3_hash_once: (a: number, b: number) => [number, number];
readonly blake3_hash_once_with_context: (a: number, b: number, c: number, d: number) => [number, number];
readonly __wbg_blake3hasher_free: (a: number, b: number) => void;
readonly blake3hasher_finalize: (a: number) => [number, number];
readonly blake3hasher_clone: (a: number) => number;
readonly blake3_empty_state: () => number;
readonly blake3_update_state: (a: number, b: number, c: number) => void;
readonly blake3_digest_for_state: (a: number) => [number, number];
readonly blake3hasher_update: (a: number, b: number, c: number) => void;
readonly blake3hasher_new: () => number;
readonly new_x25519_private_key: () => [number, number];
readonly x25519_public_key: (a: number, b: number) => [number, number, number, number];
readonly x25519_diffie_hellman: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly get_sealer_id: (a: number, b: number) => [number, number, number, number];
readonly seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => [number, number, number, number];
readonly unseal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => [number, number, number, number];
readonly encrypt: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly decrypt: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly __wbindgen_malloc: (a: number, b: number) => number;
readonly __wbindgen_realloc: (a: number, b: number, c: number, d: number) => number;
readonly __wbindgen_exn_store: (a: number) => void;
readonly __externref_table_alloc: () => number;
readonly __wbindgen_export_4: WebAssembly.Table;
readonly __externref_table_dealloc: (a: number) => void;
readonly __wbindgen_free: (a: number, b: number, c: number) => void;
readonly __wbindgen_start: () => void;
}
export type SyncInitInput = BufferSource | WebAssembly.Module;
/**
* Instantiates the given `module`, which can either be bytes or
* a precompiled `WebAssembly.Module`.
*
* @param {{ module: SyncInitInput }} module - Passing `SyncInitInput` directly is deprecated.
*
* @returns {InitOutput}
*/
export function initSync(module: { module: SyncInitInput } | SyncInitInput): InitOutput;
/**
* If `module_or_path` is {RequestInfo} or {URL}, makes a request and
* for everything else, calls `WebAssembly.instantiate` directly.
*
* @param {{ module_or_path: InitInput | Promise<InitInput> }} module_or_path - Passing `InitInput` directly is deprecated.
*
* @returns {Promise<InitOutput>}
*/
export default function __wbg_init (module_or_path?: { module_or_path: InitInput | Promise<InitInput> } | InitInput | Promise<InitInput>): Promise<InitOutput>;

File diff suppressed because it is too large Load Diff

View File

@@ -1 +0,0 @@
export const data: string;

File diff suppressed because one or more lines are too long

View File

@@ -1,240 +0,0 @@
use crate::error::CryptoError;
use ed25519_dalek::{Signer, SigningKey, Verifier, VerifyingKey};
use rand::rngs::OsRng;
use wasm_bindgen::prelude::*;
/// Generate a new Ed25519 signing key using secure random number generation.
/// Returns 32 bytes of raw key material suitable for use with the other
/// Ed25519 functions in this module.
#[wasm_bindgen]
pub fn new_ed25519_signing_key() -> Box<[u8]> {
    // OsRng draws from the platform CSPRNG (getrandom under wasm).
    SigningKey::generate(&mut OsRng).to_bytes().into()
}
/// Internal helper: derive the Ed25519 verifying (public) key from a signing key.
/// - `signing_key`: exactly 32 bytes of signing-key material
/// Returns 32 bytes of verifying-key material, or
/// `CryptoError::InvalidKeyLength` when the input is not 32 bytes long.
pub(crate) fn ed25519_verifying_key_internal(signing_key: &[u8]) -> Result<Box<[u8]>, CryptoError> {
    let key_bytes: [u8; 32] = match signing_key.try_into() {
        Ok(bytes) => bytes,
        Err(_) => return Err(CryptoError::InvalidKeyLength(32, signing_key.len())),
    };
    // Every 32-byte array is accepted by SigningKey::from_bytes, so the
    // length check above is the only possible failure.
    let public = SigningKey::from_bytes(&key_bytes).verifying_key();
    Ok(public.to_bytes().into())
}
/// WASM-exposed wrapper around `ed25519_verifying_key_internal`.
/// - `signing_key`: 32 bytes of signing key material
/// Returns 32 bytes of verifying key material or throws JsError if the key
/// is invalid.
#[wasm_bindgen]
pub fn ed25519_verifying_key(signing_key: &[u8]) -> Result<Box<[u8]>, JsError> {
    match ed25519_verifying_key_internal(signing_key) {
        Ok(key) => Ok(key),
        Err(err) => Err(JsError::new(&err.to_string())),
    }
}
/// Internal helper: produce an Ed25519 signature over `message`.
/// - `signing_key`: exactly 32 bytes of signing-key material
/// - `message`: arbitrary bytes to sign
/// Returns the 64-byte signature, or `CryptoError::InvalidKeyLength` when
/// the key is not 32 bytes long.
pub(crate) fn ed25519_sign_internal(
    signing_key: &[u8],
    message: &[u8],
) -> Result<[u8; 64], CryptoError> {
    let key_bytes: [u8; 32] = match signing_key.try_into() {
        Ok(bytes) => bytes,
        Err(_) => return Err(CryptoError::InvalidKeyLength(32, signing_key.len())),
    };
    Ok(SigningKey::from_bytes(&key_bytes).sign(message).to_bytes())
}
/// WASM-exposed wrapper around `ed25519_sign_internal`.
/// - `signing_key`: 32 bytes of signing key material
/// - `message`: Raw bytes to sign
/// Returns 64 bytes of signature material or throws JsError if signing fails.
#[wasm_bindgen]
pub fn ed25519_sign(signing_key: &[u8], message: &[u8]) -> Result<Box<[u8]>, JsError> {
    let signature = ed25519_sign_internal(signing_key, message)?;
    Ok(signature.to_vec().into_boxed_slice())
}
/// Check an Ed25519 signature against a message and verifying key.
/// - `verifying_key`: 32 bytes of verifying key material
/// - `message`: the bytes that were signed
/// - `signature`: 64 bytes of signature material
/// Yields Ok(true) for a valid signature, Ok(false) for an invalid one, and a
/// CryptoError when the key or signature is the wrong size or unparseable.
/// Validation order (key length, key parse, signature length) is part of the
/// observable error behavior and is kept as-is.
pub(crate) fn ed25519_verify_internal(
    verifying_key: &[u8],
    message: &[u8],
    signature: &[u8],
) -> Result<bool, CryptoError> {
    let vk_bytes: [u8; 32] = verifying_key
        .try_into()
        .map_err(|_| CryptoError::InvalidKeyLength(32, verifying_key.len()))?;
    let vk = VerifyingKey::from_bytes(&vk_bytes)
        .map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))?;
    let sig_bytes: [u8; 64] = signature
        .try_into()
        .map_err(|_| CryptoError::InvalidSignatureLength)?;
    let sig = ed25519_dalek::Signature::from_bytes(&sig_bytes);
    Ok(vk.verify(message, &sig).is_ok())
}
/// WASM entry point: verify an Ed25519 signature.
/// - `verifying_key`: 32 bytes of verifying key material
/// - `message`: raw bytes that were signed
/// - `signature`: 64 bytes of signature material
/// Yields true/false for valid/invalid signatures; throws JsError when the
/// key or signature bytes are malformed.
#[wasm_bindgen]
pub fn ed25519_verify(
    verifying_key: &[u8],
    message: &[u8],
    signature: &[u8],
) -> Result<bool, JsError> {
    match ed25519_verify_internal(verifying_key, message, signature) {
        Ok(valid) => Ok(valid),
        Err(err) => Err(JsError::new(&err.to_string())),
    }
}
/// WASM entry point: validate Ed25519 signing key bytes and hand back a copy.
/// - `bytes`: candidate key material, must be exactly 32 bytes
/// Yields the same 32 bytes; throws JsError when the length is wrong.
#[wasm_bindgen]
pub fn ed25519_signing_key_from_bytes(bytes: &[u8]) -> Result<Box<[u8]>, JsError> {
    if bytes.len() != 32 {
        return Err(JsError::new("Invalid signing key length"));
    }
    Ok(bytes.to_vec().into_boxed_slice())
}
/// WASM-exposed function to derive the public key from an Ed25519 signing key.
/// - `signing_key`: 32 bytes of signing key material
/// Returns 32 bytes of public key material or throws JsError if key is invalid.
#[wasm_bindgen]
pub fn ed25519_signing_key_to_public(signing_key: &[u8]) -> Result<Box<[u8]>, JsError> {
ed25519_verifying_key_internal(signing_key).map_err(|e| JsError::new(&e.to_string()))
}
/// WASM-exposed function to sign a message with an Ed25519 signing key.
/// - `signing_key`: 32 bytes of signing key material
/// - `message`: Raw bytes to sign
/// Returns 64 bytes of signature material or throws JsError if signing fails.
#[wasm_bindgen]
pub fn ed25519_signing_key_sign(signing_key: &[u8], message: &[u8]) -> Result<Box<[u8]>, JsError> {
Ok(ed25519_sign_internal(signing_key, message)?.into())
}
/// WASM entry point: validate Ed25519 verifying key bytes and hand back a copy.
/// - `bytes`: candidate key material, must be exactly 32 bytes
/// Yields the same 32 bytes; throws JsError when the length is wrong.
#[wasm_bindgen]
pub fn ed25519_verifying_key_from_bytes(bytes: &[u8]) -> Result<Box<[u8]>, JsError> {
    if bytes.len() != 32 {
        return Err(JsError::new("Invalid verifying key length"));
    }
    Ok(bytes.to_vec().into_boxed_slice())
}
/// WASM entry point: validate Ed25519 signature bytes and hand back a copy.
/// - `bytes`: candidate signature material, must be exactly 64 bytes
/// Yields the same 64 bytes; throws JsError when the length is wrong.
#[wasm_bindgen]
pub fn ed25519_signature_from_bytes(bytes: &[u8]) -> Result<Box<[u8]>, JsError> {
    if bytes.len() != 64 {
        return Err(JsError::new("Invalid signature length"));
    }
    Ok(bytes.to_vec().into_boxed_slice())
}
#[cfg(test)]
mod tests {
    // Unit tests for the Ed25519 bindings: round-trip key generation,
    // signing/verification, and the length-validation error paths.
    use super::*;
    #[test]
    fn test_ed25519_key_generation_and_signing() {
        // Test key generation
        let signing_key = new_ed25519_signing_key();
        assert_eq!(signing_key.len(), 32, "Signing key should be 32 bytes");
        // Test verifying key derivation
        let verifying_key = ed25519_verifying_key_internal(&signing_key).unwrap();
        assert_eq!(verifying_key.len(), 32, "Verifying key should be 32 bytes");
        // Test that different signing keys produce different verifying keys
        let signing_key2 = new_ed25519_signing_key();
        let verifying_key2 = ed25519_verifying_key_internal(&signing_key2).unwrap();
        assert_ne!(
            verifying_key, verifying_key2,
            "Different signing keys should produce different verifying keys"
        );
        // Test signing and verification
        let message = b"Test message";
        let signature = ed25519_sign_internal(&signing_key, message).unwrap();
        assert_eq!(signature.len(), 64, "Signature should be 64 bytes");
        // Test successful verification
        let verification_result =
            ed25519_verify_internal(&verifying_key, message, &signature).unwrap();
        assert!(
            verification_result,
            "Valid signature should verify successfully"
        );
        // Test verification with wrong message
        let wrong_message = b"Wrong message";
        let wrong_verification =
            ed25519_verify_internal(&verifying_key, wrong_message, &signature).unwrap();
        assert!(
            !wrong_verification,
            "Signature should not verify with wrong message"
        );
        // Test verification with wrong key
        let wrong_verification =
            ed25519_verify_internal(&verifying_key2, message, &signature).unwrap();
        assert!(
            !wrong_verification,
            "Signature should not verify with wrong key"
        );
        // Test verification with tampered signature
        // (flip one bit in the first byte; verification must fail, not error)
        let mut tampered_signature = signature.clone();
        tampered_signature[0] ^= 1;
        let wrong_verification =
            ed25519_verify_internal(&verifying_key, message, &tampered_signature).unwrap();
        assert!(!wrong_verification, "Tampered signature should not verify");
    }
    #[test]
    fn test_ed25519_error_cases() {
        // Exercises every length-validation branch: too-short and too-long
        // keys and signatures must all surface as Err, never panic.
        // Test invalid signing key length
        let invalid_signing_key = vec![0u8; 31]; // Too short
        let result = ed25519_verifying_key_internal(&invalid_signing_key);
        assert!(result.is_err());
        let result = ed25519_sign_internal(&invalid_signing_key, b"test");
        assert!(result.is_err());
        // Test invalid verifying key length
        let invalid_verifying_key = vec![0u8; 31]; // Too short
        let valid_signing_key = new_ed25519_signing_key();
        let valid_signature = ed25519_sign_internal(&valid_signing_key, b"test").unwrap();
        let result = ed25519_verify_internal(&invalid_verifying_key, b"test", &valid_signature);
        assert!(result.is_err());
        // Test invalid signature length
        let valid_verifying_key = ed25519_verifying_key_internal(&valid_signing_key).unwrap();
        let invalid_signature = vec![0u8; 63]; // Too short
        let result = ed25519_verify_internal(&valid_verifying_key, b"test", &invalid_signature);
        assert!(result.is_err());
        // Test with too long keys
        let too_long_key = vec![0u8; 33]; // Too long
        let result = ed25519_verifying_key_internal(&too_long_key);
        assert!(result.is_err());
        let result = ed25519_sign_internal(&too_long_key, b"test");
        assert!(result.is_err());
        // Test with too long signature
        let too_long_signature = vec![0u8; 65]; // Too long
        let result = ed25519_verify_internal(&valid_verifying_key, b"test", &too_long_signature);
        assert!(result.is_err());
    }
}

View File

@@ -1,113 +0,0 @@
use crate::error::CryptoError;
use crate::hash::blake3::generate_nonce;
use bs58;
use wasm_bindgen::prelude::*;
/// Encrypt `plaintext` under a prefixed, base58-encoded symmetric key secret.
/// - `key_secret`: base58 key secret carrying the "keySecret_z" prefix
/// - `nonce_material`: raw bytes hashed into the XSalsa20 nonce
/// Yields the ciphertext, or a CryptoError when the secret is malformed.
pub fn encrypt_internal(
    plaintext: &[u8],
    key_secret: &str,
    nonce_material: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
    // Drop the "keySecret_z" prefix, then base58-decode the raw key bytes.
    let encoded = key_secret
        .strip_prefix("keySecret_z")
        .ok_or(CryptoError::InvalidPrefix("key secret", "keySecret_z"))?;
    let key = bs58::decode(encoded)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // Derive the nonce, then run the unauthenticated XSalsa20 stream cipher.
    let nonce = generate_nonce(nonce_material);
    let ciphertext = super::xsalsa20::encrypt_xsalsa20_raw_internal(&key, &nonce, plaintext)?;
    Ok(ciphertext)
}
/// Decrypt `ciphertext` under a prefixed, base58-encoded symmetric key secret.
/// - `key_secret`: base58 key secret carrying the "keySecret_z" prefix
/// - `nonce_material`: raw bytes hashed into the XSalsa20 nonce (must match encryption)
/// Yields the plaintext, or a CryptoError when the secret is malformed.
pub fn decrypt_internal(
    ciphertext: &[u8],
    key_secret: &str,
    nonce_material: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
    // Drop the "keySecret_z" prefix, then base58-decode the raw key bytes.
    let encoded = key_secret
        .strip_prefix("keySecret_z")
        .ok_or(CryptoError::InvalidPrefix("key secret", "keySecret_z"))?;
    let key = bs58::decode(encoded)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // Derive the nonce, then run the symmetric XSalsa20 stream cipher.
    let nonce = generate_nonce(nonce_material);
    let plaintext = super::xsalsa20::decrypt_xsalsa20_raw_internal(&key, &nonce, ciphertext)?;
    Ok(plaintext)
}
/// WASM entry point: encrypt bytes with a key secret and nonce material.
/// - `value`: raw bytes to encrypt
/// - `key_secret`: base58 key secret with the "keySecret_z" prefix
/// - `nonce_material`: raw bytes used to derive the nonce
/// Yields the ciphertext; throws JsError when encryption fails.
#[wasm_bindgen(js_name = encrypt)]
pub fn encrypt(
    value: &[u8],
    key_secret: &str,
    nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
    match encrypt_internal(value, key_secret, nonce_material) {
        Ok(ciphertext) => Ok(ciphertext),
        Err(err) => Err(JsError::new(&err.to_string())),
    }
}
/// WASM-exposed function to decrypt bytes with a key secret and nonce material.
/// - `ciphertext`: The encrypted bytes to decrypt
/// - `key_secret`: A base58-encoded key secret with "keySecret_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce (must match encryption)
/// Returns the decrypted bytes or throws a JsError if decryption fails.
#[wasm_bindgen(js_name = decrypt)]
pub fn decrypt(
    ciphertext: &[u8],
    key_secret: &str,
    nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
    // `decrypt_internal` already yields Box<[u8]>; the former `.into()` here
    // was an identity conversion and has been removed.
    Ok(decrypt_internal(ciphertext, key_secret, nonce_material)?)
}
#[cfg(test)]
mod tests {
    // Unit tests for the symmetric encrypt/decrypt wrappers: a round-trip
    // with a fixed key secret, plus the malformed-key error paths.
    use super::*;
    #[test]
    fn test_encrypt_decrypt() {
        // Test data
        let plaintext = b"Hello, World!";
        let key_secret = "keySecret_z11111111111111111111111111111111"; // Example base58 encoded key
        let nonce_material = b"test_nonce_material";
        // Test encryption
        let ciphertext = encrypt_internal(plaintext, key_secret, nonce_material).unwrap();
        assert!(!ciphertext.is_empty());
        // Test decryption
        // (same key secret and nonce material must restore the plaintext)
        let decrypted = decrypt_internal(&ciphertext, key_secret, nonce_material).unwrap();
        assert_eq!(&*decrypted, plaintext);
    }
    #[test]
    fn test_invalid_key_secret() {
        let plaintext = b"test";
        let nonce_material = b"nonce";
        // Test with invalid key secret format
        // (missing the required "keySecret_z" prefix)
        let result = encrypt_internal(plaintext, "invalid_key", nonce_material);
        assert!(result.is_err());
        // Test with invalid base58 encoding
        // ('!' is outside the base58 alphabet)
        let result = encrypt_internal(plaintext, "keySecret_z!!!!", nonce_material);
        assert!(result.is_err());
    }
}

View File

@@ -1,200 +0,0 @@
use crate::crypto::x25519::x25519_diffie_hellman_internal;
use crate::crypto::xsalsa20::{decrypt_xsalsa20_poly1305, encrypt_xsalsa20_poly1305};
use crate::error::CryptoError;
use crate::hash::blake3::generate_nonce;
use bs58;
use wasm_bindgen::prelude::*;
/// Seal a message for a recipient using X25519 + XSalsa20-Poly1305.
/// - `message`: raw bytes to seal
/// - `sender_secret`: base58 private key carrying the "sealerSecret_z" prefix
/// - `recipient_id`: base58 public key carrying the "sealer_z" prefix
/// - `nonce_material`: raw bytes hashed into the nonce
/// Yields the sealed bytes, or a CryptoError when either key is malformed.
///
/// Steps: strip/decode both base58 keys, run an X25519 Diffie-Hellman to get
/// the shared secret, derive the nonce via BLAKE3, then encrypt-and-
/// authenticate with XSalsa20-Poly1305.
pub fn seal_internal(
    message: &[u8],
    sender_secret: &str,
    recipient_id: &str,
    nonce_material: &[u8],
) -> Result<Vec<u8>, CryptoError> {
    // Sender's private key: strip "sealerSecret_z", then base58-decode.
    let encoded_secret = sender_secret
        .strip_prefix("sealerSecret_z")
        .ok_or(CryptoError::InvalidPrefix(
            "sealer secret",
            "sealerSecret_z",
        ))?;
    let sender_private_key = bs58::decode(encoded_secret)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // Recipient's public key: strip "sealer_z", then base58-decode.
    let encoded_id = recipient_id
        .strip_prefix("sealer_z")
        .ok_or(CryptoError::InvalidPrefix("sealer ID", "sealer_z"))?;
    let recipient_public_key = bs58::decode(encoded_id)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // Derive the nonce and the shared secret, then authenticated-encrypt.
    let nonce = generate_nonce(nonce_material);
    let shared_secret = x25519_diffie_hellman_internal(&sender_private_key, &recipient_public_key)?;
    Ok(encrypt_xsalsa20_poly1305(&shared_secret, &nonce, message)?.into())
}
/// Unseal a message using X25519 + XSalsa20-Poly1305.
/// - `sealed_message`: the sealed bytes to open
/// - `recipient_secret`: base58 private key carrying the "sealerSecret_z" prefix
/// - `sender_id`: base58 public key carrying the "sealer_z" prefix
/// - `nonce_material`: raw bytes hashed into the nonce (must match sealing)
/// Yields the original bytes, or a CryptoError when a key is malformed or the
/// Poly1305 authentication check fails.
///
/// Steps mirror `seal_internal`: strip/decode both base58 keys, derive the
/// X25519 shared secret, derive the nonce via BLAKE3, then decrypt-and-
/// authenticate with XSalsa20-Poly1305.
fn unseal_internal(
    sealed_message: &[u8],
    recipient_secret: &str,
    sender_id: &str,
    nonce_material: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
    // Recipient's private key: strip "sealerSecret_z", then base58-decode.
    let encoded_secret = recipient_secret
        .strip_prefix("sealerSecret_z")
        .ok_or(CryptoError::InvalidPrefix(
            "sealer secret",
            "sealerSecret_z",
        ))?;
    let recipient_private_key = bs58::decode(encoded_secret)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // Sender's public key: strip "sealer_z", then base58-decode.
    let encoded_id = sender_id
        .strip_prefix("sealer_z")
        .ok_or(CryptoError::InvalidPrefix("sealer ID", "sealer_z"))?;
    let sender_public_key = bs58::decode(encoded_id)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // Derive the nonce and the shared secret, then authenticated-decrypt.
    let nonce = generate_nonce(nonce_material);
    let shared_secret = x25519_diffie_hellman_internal(&recipient_private_key, &sender_public_key)?;
    Ok(decrypt_xsalsa20_poly1305(&shared_secret, &nonce, sealed_message)?.into())
}
/// WASM entry point: seal a message with X25519 + XSalsa20-Poly1305
/// (authenticated encryption).
/// - `message`: raw bytes to seal
/// - `sender_secret`: base58 private key with the "sealerSecret_z" prefix
/// - `recipient_id`: base58 public key with the "sealer_z" prefix
/// - `nonce_material`: raw bytes used to derive the nonce
/// Yields the sealed bytes; throws JsError when sealing fails.
#[wasm_bindgen(js_name = seal)]
pub fn seal(
    message: &[u8],
    sender_secret: &str,
    recipient_id: &str,
    nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
    let sealed = seal_internal(message, sender_secret, recipient_id, nonce_material)?;
    Ok(sealed.into_boxed_slice())
}
/// WASM-exposed function for unsealing a message using X25519 + XSalsa20-Poly1305.
/// Provides authenticated decryption with perfect forward secrecy.
/// - `sealed_message`: The sealed bytes to decrypt
/// - `recipient_secret`: Base58-encoded recipient's private key with "sealerSecret_z" prefix
/// - `sender_id`: Base58-encoded sender's public key with "sealer_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce (must match sealing)
/// Returns unsealed bytes or throws JsError if unsealing fails.
#[wasm_bindgen(js_name = unseal)]
pub fn unseal(
    sealed_message: &[u8],
    recipient_secret: &str,
    sender_id: &str,
    nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
    // `unseal_internal` already yields Box<[u8]>; the former `.into()` here
    // was an identity conversion and has been removed.
    Ok(unseal_internal(sealed_message, recipient_secret, sender_id, nonce_material)?)
}
#[cfg(test)]
mod tests {
    // Unit tests for seal/unseal: a round-trip using one keypair on both
    // sides, plus the malformed-key error paths.
    use super::*;
    use crate::crypto::x25519::{new_x25519_private_key, x25519_public_key_internal};
    #[test]
    fn test_seal_unseal() {
        // Generate real keys
        let sender_private = new_x25519_private_key();
        let sender_public = x25519_public_key_internal(&sender_private).unwrap();
        // Encode keys with proper prefixes
        let sender_secret = format!(
            "sealerSecret_z{}",
            bs58::encode(&sender_private).into_string()
        );
        let recipient_id = format!("sealer_z{}", bs58::encode(&sender_public).into_string());
        // Test data
        let message = b"Secret message";
        let nonce_material = b"test_nonce_material";
        // Test sealing
        let sealed = seal_internal(message, &sender_secret, &recipient_id, nonce_material).unwrap();
        assert!(!sealed.is_empty());
        // Test unsealing (using same keys since it's a test)
        // (DH of a key with its own public key is symmetric, so this round-trips)
        let unsealed =
            unseal_internal(&sealed, &sender_secret, &recipient_id, nonce_material).unwrap();
        assert_eq!(&*unsealed, message);
    }
    #[test]
    fn test_invalid_keys() {
        let message = b"test";
        let nonce_material = b"nonce";
        // Test with invalid sender secret format
        // (missing the required "sealerSecret_z" prefix)
        let result = seal_internal(
            message,
            "invalid_key",
            "sealer_z22222222222222222222222222222222",
            nonce_material,
        );
        assert!(result.is_err());
        // Test with invalid recipient ID format
        // (missing the required "sealer_z" prefix)
        let result = seal_internal(
            message,
            "sealerSecret_z11111111111111111111111111111111",
            "invalid_key",
            nonce_material,
        );
        assert!(result.is_err());
        // Test with invalid base58 encoding
        // ('!' is outside the base58 alphabet)
        let result = seal_internal(
            message,
            "sealerSecret_z!!!!",
            "sealer_z22222222222222222222222222222222",
            nonce_material,
        );
        assert!(result.is_err());
    }
}

View File

@@ -1,184 +0,0 @@
use crate::crypto::ed25519::{
ed25519_sign_internal, ed25519_verify_internal, ed25519_verifying_key_internal,
};
use crate::error::CryptoError;
use bs58;
use wasm_bindgen::prelude::*;
/// Sign `message` with a prefixed, base58-encoded Ed25519 signing key.
/// - `secret`: base58 signing key carrying the "signerSecret_z" prefix
/// Yields a base58 signature carrying the "signature_z" prefix, or a
/// CryptoError when the secret is malformed.
pub fn sign_internal(message: &[u8], secret: &str) -> Result<String, CryptoError> {
    // Drop the "signerSecret_z" prefix, then base58-decode the key bytes.
    let encoded = secret
        .strip_prefix("signerSecret_z")
        .ok_or(CryptoError::InvalidPrefix("signer secret", "signerSecret_z"))?;
    let key_bytes = bs58::decode(encoded)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // NOTE(review): signing failures are reported as InvalidVerifyingKey —
    // the variant name looks misleading but is preserved for compatibility.
    let signature = ed25519_sign_internal(&key_bytes, message)
        .map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))?;
    Ok(format!(
        "signature_z{}",
        bs58::encode(signature).into_string()
    ))
}
/// Verify a prefixed, base58-encoded Ed25519 signature.
/// - `signature`: base58 signature carrying the "signature_z" prefix
/// - `message`: the bytes that were signed
/// - `id`: base58 verifying key carrying the "signer_z" prefix
/// Yields Ok(true)/Ok(false) for valid/invalid signatures, or a CryptoError
/// when either encoded value is malformed.
pub fn verify_internal(signature: &str, message: &[u8], id: &str) -> Result<bool, CryptoError> {
    // Drop the "signature_z" prefix, then base58-decode the signature bytes.
    // (The InvalidPrefix argument order below is matched by the unit tests.)
    let encoded_sig = signature
        .strip_prefix("signature_z")
        .ok_or(CryptoError::InvalidPrefix("signature_z", "signature"))?;
    let signature_bytes = bs58::decode(encoded_sig)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    // Drop the "signer_z" prefix, then base58-decode the verifying key bytes.
    let encoded_id = id
        .strip_prefix("signer_z")
        .ok_or(CryptoError::InvalidPrefix("signer_z", "signer ID"))?;
    let verifying_key = bs58::decode(encoded_id)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    ed25519_verify_internal(&verifying_key, message, &signature_bytes)
        .map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))
}
/// Derive the public signer ID for a prefixed, base58-encoded signing key.
/// - `secret`: base58 signing key carrying the "signerSecret_z" prefix
/// Yields a base58 verifying key carrying the "signer_z" prefix, or a
/// CryptoError when the secret is malformed.
pub fn get_signer_id_internal(secret: &str) -> Result<String, CryptoError> {
    // Drop the "signerSecret_z" prefix, then base58-decode the key bytes.
    let encoded = secret
        .strip_prefix("signerSecret_z")
        .ok_or(CryptoError::InvalidPrefix(
            "signerSecret_z",
            "signer secret",
        ))?;
    let secret_bytes = bs58::decode(encoded)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    let verifying_key = ed25519_verifying_key_internal(&secret_bytes)
        .map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))?;
    Ok(format!(
        "signer_z{}",
        bs58::encode(verifying_key).into_string()
    ))
}
/// WASM entry point: sign a message using Ed25519.
/// - `message`: raw bytes to sign
/// - `secret`: UTF-8 bytes of a base58 signing key with the "signerSecret_z" prefix
/// Yields a base58 signature with the "signature_z" prefix; throws JsError
/// when the secret is not valid UTF-8 or signing fails.
#[wasm_bindgen(js_name = sign)]
pub fn sign(message: &[u8], secret: &[u8]) -> Result<String, JsError> {
    // The secret arrives as raw bytes; it must decode as UTF-8 first.
    let secret_str = match std::str::from_utf8(secret) {
        Ok(s) => s,
        Err(e) => return Err(JsError::new(&format!("Invalid UTF-8 in secret: {:?}", e))),
    };
    sign_internal(message, secret_str).map_err(|e| JsError::new(&e.to_string()))
}
/// WASM entry point: verify an Ed25519 signature.
/// - `signature`: UTF-8 bytes of a base58 signature with the "signature_z" prefix
/// - `message`: raw bytes that were signed
/// - `id`: UTF-8 bytes of a base58 verifying key with the "signer_z" prefix
/// Yields true/false for valid/invalid signatures; throws JsError when either
/// encoded input is not valid UTF-8 or is malformed.
#[wasm_bindgen(js_name = verify)]
pub fn verify(signature: &[u8], message: &[u8], id: &[u8]) -> Result<bool, JsError> {
    // Both encoded inputs arrive as raw bytes and must decode as UTF-8 first.
    let signature_str = match std::str::from_utf8(signature) {
        Ok(s) => s,
        Err(e) => return Err(JsError::new(&format!("Invalid UTF-8 in signature: {:?}", e))),
    };
    let id_str = match std::str::from_utf8(id) {
        Ok(s) => s,
        Err(e) => return Err(JsError::new(&format!("Invalid UTF-8 in id: {:?}", e))),
    };
    verify_internal(signature_str, message, id_str).map_err(|e| JsError::new(&e.to_string()))
}
/// WASM entry point: derive a signer ID from a signing key.
/// - `secret`: UTF-8 bytes of a base58 signing key with the "signerSecret_z" prefix
/// Yields a base58 verifying key with the "signer_z" prefix; throws JsError
/// when the secret is not valid UTF-8 or derivation fails.
#[wasm_bindgen(js_name = get_signer_id)]
pub fn get_signer_id(secret: &[u8]) -> Result<String, JsError> {
    // The secret arrives as raw bytes; it must decode as UTF-8 first.
    let secret_str = match std::str::from_utf8(secret) {
        Ok(s) => s,
        Err(e) => return Err(JsError::new(&format!("Invalid UTF-8 in secret: {:?}", e))),
    };
    get_signer_id_internal(secret_str).map_err(|e| JsError::new(&e.to_string()))
}
#[cfg(test)]
mod tests {
    // Unit tests for the prefixed sign/verify/get_signer_id wrappers:
    // a full sign→verify round trip, the malformed-input error variants
    // (matched structurally, including their argument order), and the
    // determinism of signer-ID derivation.
    use super::*;
    use crate::crypto::ed25519::new_ed25519_signing_key;
    #[test]
    fn test_sign_and_verify() {
        let message = b"hello world";
        // Create a test signing key
        let signing_key = new_ed25519_signing_key();
        let secret = format!("signerSecret_z{}", bs58::encode(&signing_key).into_string());
        // Sign the message
        let signature = sign_internal(message, &secret).unwrap();
        // Get the public key for verification
        let secret_bytes = bs58::decode(secret.strip_prefix("signerSecret_z").unwrap())
            .into_vec()
            .unwrap();
        let verifying_key = ed25519_verifying_key_internal(&secret_bytes).unwrap();
        let signer_id = format!("signer_z{}", bs58::encode(&verifying_key).into_string());
        // Verify the signature
        assert!(verify_internal(&signature, message, &signer_id).unwrap());
    }
    #[test]
    fn test_invalid_inputs() {
        let message = b"hello world";
        // Test invalid base58 in secret
        let result = sign_internal(message, "signerSecret_z!!!invalid!!!");
        assert!(matches!(result, Err(CryptoError::Base58Error(_))));
        // Test invalid signature format
        // (the expected InvalidPrefix argument order mirrors verify_internal)
        let result = verify_internal("not_a_signature", message, "signer_z123");
        assert!(matches!(
            result,
            Err(CryptoError::InvalidPrefix("signature_z", "signature"))
        ));
        // Test invalid signer ID format
        let result = verify_internal("signature_z123", message, "not_a_signer");
        assert!(matches!(
            result,
            Err(CryptoError::InvalidPrefix("signer_z", "signer ID"))
        ));
    }
    #[test]
    fn test_get_signer_id() {
        // Create a test signing key
        let signing_key = new_ed25519_signing_key();
        let secret = format!("signerSecret_z{}", bs58::encode(&signing_key).into_string());
        // Get signer ID
        let signer_id = get_signer_id_internal(&secret).unwrap();
        assert!(signer_id.starts_with("signer_z"));
        // Test that same secret produces same ID
        let signer_id2 = get_signer_id_internal(&secret).unwrap();
        assert_eq!(signer_id, signer_id2);
        // Test invalid secret format
        let result = get_signer_id_internal("invalid_secret");
        assert!(matches!(
            result,
            Err(CryptoError::InvalidPrefix(
                "signerSecret_z",
                "signer secret"
            ))
        ));
        // Test invalid base58
        let result = get_signer_id_internal("signerSecret_z!!!invalid!!!");
        assert!(matches!(result, Err(CryptoError::Base58Error(_))));
    }
}

View File

@@ -1,168 +0,0 @@
use crate::error::CryptoError;
use bs58;
use wasm_bindgen::prelude::*;
use x25519_dalek::{PublicKey, StaticSecret};
/// Create a fresh X25519 private key from secure randomness.
/// Yields the raw 32-byte key; a StaticSecret may be reused across multiple
/// Diffie-Hellman exchanges.
#[wasm_bindgen]
pub fn new_x25519_private_key() -> Vec<u8> {
    StaticSecret::random().to_bytes().to_vec()
}
/// Derive the X25519 public key for a given private key.
/// Expects exactly 32 bytes of private-key material; yields the 32-byte
/// public key, or CryptoError::InvalidKeyLength on a wrong-sized input.
pub(crate) fn x25519_public_key_internal(private_key: &[u8]) -> Result<[u8; 32], CryptoError> {
    let seed: [u8; 32] = match private_key.try_into() {
        Ok(bytes) => bytes,
        Err(_) => return Err(CryptoError::InvalidKeyLength(32, private_key.len())),
    };
    let public = PublicKey::from(&StaticSecret::from(seed));
    Ok(public.to_bytes())
}
/// WASM entry point: derive an X25519 public key from a private key.
/// - `private_key`: 32 bytes of private key material
/// Yields 32 bytes of public key material; throws JsError on invalid input.
#[wasm_bindgen]
pub fn x25519_public_key(private_key: &[u8]) -> Result<Vec<u8>, JsError> {
    let public = x25519_public_key_internal(private_key)?;
    Ok(public.to_vec())
}
/// Perform an X25519 Diffie-Hellman exchange.
/// - `private_key`: 32 bytes of our private-key material
/// - `public_key`: 32 bytes of the peer's public-key material
/// Yields the 32-byte shared secret, or CryptoError::InvalidKeyLength when
/// either input is not exactly 32 bytes.
pub(crate) fn x25519_diffie_hellman_internal(
    private_key: &[u8],
    public_key: &[u8],
) -> Result<[u8; 32], CryptoError> {
    let private_bytes: [u8; 32] = private_key
        .try_into()
        .map_err(|_| CryptoError::InvalidKeyLength(32, private_key.len()))?;
    let public_bytes: [u8; 32] = public_key
        .try_into()
        .map_err(|_| CryptoError::InvalidKeyLength(32, public_key.len()))?;
    let shared = StaticSecret::from(private_bytes).diffie_hellman(&PublicKey::from(public_bytes));
    Ok(shared.to_bytes())
}
/// WASM entry point: perform an X25519 Diffie-Hellman key exchange.
/// - `private_key`: 32 bytes of private key material
/// - `public_key`: 32 bytes of public key material
/// Yields 32 bytes of shared secret; throws JsError when either key is invalid.
#[wasm_bindgen]
pub fn x25519_diffie_hellman(private_key: &[u8], public_key: &[u8]) -> Result<Vec<u8>, JsError> {
    let shared = x25519_diffie_hellman_internal(private_key, public_key)?;
    Ok(shared.to_vec())
}
/// Derive the public sealer ID for a prefixed, base58-encoded sealer secret.
/// - `secret`: base58 private key carrying the "sealerSecret_z" prefix
/// Yields a base58 public key carrying the "sealer_z" prefix, or a
/// CryptoError when the secret is malformed.
pub fn get_sealer_id_internal(secret: &str) -> Result<String, CryptoError> {
    // Drop the "sealerSecret_z" prefix, then base58-decode the key bytes.
    let encoded = secret
        .strip_prefix("sealerSecret_z")
        .ok_or(CryptoError::InvalidPrefix(
            "sealerSecret_z",
            "sealer secret",
        ))?;
    let private_bytes = bs58::decode(encoded)
        .into_vec()
        .map_err(|e| CryptoError::Base58Error(e.to_string()))?;
    let public_bytes = x25519_public_key_internal(&private_bytes)
        .map_err(|e| CryptoError::InvalidPublicKey(e.to_string()))?;
    Ok(format!(
        "sealer_z{}",
        bs58::encode(public_bytes).into_string()
    ))
}
/// WASM entry point: derive a sealer ID from a sealer secret.
/// - `secret`: UTF-8 bytes of a base58 secret with the "sealerSecret_z" prefix
/// Yields a base58 sealer ID with the "sealer_z" prefix; throws JsError when
/// the secret is not valid UTF-8 or derivation fails.
#[wasm_bindgen]
pub fn get_sealer_id(secret: &[u8]) -> Result<String, JsError> {
    // The secret arrives as raw bytes; it must decode as UTF-8 first.
    let secret_str = match std::str::from_utf8(secret) {
        Ok(s) => s,
        Err(e) => return Err(JsError::new(&format!("Invalid UTF-8 in secret: {:?}", e))),
    };
    get_sealer_id_internal(secret_str).map_err(|e| JsError::new(&e.to_string()))
}
#[cfg(test)]
mod tests {
    // Unit tests for the X25519 bindings: key generation, the symmetry of the
    // Diffie-Hellman exchange, and sealer-ID derivation with its error paths.
    use super::*;
    #[test]
    fn test_x25519_key_generation() {
        // Test that we get the correct length keys
        let private_key = new_x25519_private_key();
        assert_eq!(private_key.len(), 32);
        // Test that public key generation works and produces correct length
        let public_key = x25519_public_key_internal(&private_key).unwrap();
        assert_eq!(public_key.len(), 32);
        // Test that different private keys produce different public keys
        let private_key2 = new_x25519_private_key();
        let public_key2 = x25519_public_key_internal(&private_key2).unwrap();
        assert_ne!(public_key, public_key2);
    }
    #[test]
    fn test_x25519_key_exchange() {
        // Generate sender's keypair
        let sender_private = new_x25519_private_key();
        let sender_public = x25519_public_key_internal(&sender_private).unwrap();
        // Generate recipient's keypair
        let recipient_private = new_x25519_private_key();
        let recipient_public = x25519_public_key_internal(&recipient_private).unwrap();
        // Test properties we expect from the shared secret
        let shared_secret1 =
            x25519_diffie_hellman_internal(&sender_private, &recipient_public).unwrap();
        let shared_secret2 =
            x25519_diffie_hellman_internal(&recipient_private, &sender_public).unwrap();
        // Both sides should arrive at the same shared secret
        assert_eq!(shared_secret1, shared_secret2);
        // Shared secret should be 32 bytes
        assert_eq!(shared_secret1.len(), 32);
        // Different recipient should produce different shared secret
        let other_recipient_private = new_x25519_private_key();
        let other_recipient_public = x25519_public_key_internal(&other_recipient_private).unwrap();
        let different_shared_secret =
            x25519_diffie_hellman_internal(&sender_private, &other_recipient_public).unwrap();
        assert_ne!(shared_secret1, different_shared_secret);
    }
    #[test]
    fn test_get_sealer_id() {
        // Create a test private key
        let private_key = new_x25519_private_key();
        let secret = format!("sealerSecret_z{}", bs58::encode(&private_key).into_string());
        // Get sealer ID
        let sealer_id = get_sealer_id_internal(&secret).unwrap();
        assert!(sealer_id.starts_with("sealer_z"));
        // Test that same secret produces same ID
        let sealer_id2 = get_sealer_id_internal(&secret).unwrap();
        assert_eq!(sealer_id, sealer_id2);
        // Test invalid secret format
        // (missing the required "sealerSecret_z" prefix)
        let result = get_sealer_id_internal("invalid_secret");
        assert!(matches!(
            result,
            Err(CryptoError::InvalidPrefix(
                "sealerSecret_z",
                "sealer secret"
            ))
        ));
        // Test invalid base58
        // ('!' is outside the base58 alphabet)
        let result = get_sealer_id_internal("sealerSecret_z!!!invalid!!!");
        assert!(matches!(result, Err(CryptoError::Base58Error(_))));
    }
}

View File

@@ -1,256 +0,0 @@
use crate::error::CryptoError;
use crate::hash::blake3::generate_nonce;
use crypto_secretbox::{
aead::{Aead, KeyInit},
XSalsa20Poly1305,
};
use salsa20::cipher::{KeyIvInit, StreamCipher};
use salsa20::XSalsa20;
use wasm_bindgen::prelude::*;
/// WASM entry point: XSalsa20 stream encryption (no authentication).
/// - `key`: 32-byte encryption key
/// - `nonce_material`: raw bytes hashed by BLAKE3 into the 24-byte nonce
/// - `plaintext`: raw bytes to encrypt
/// Yields the ciphertext; throws JsError on invalid key/nonce sizes.
/// No integrity protection — prefer encrypt_xsalsa20_poly1305 for
/// authenticated encryption.
#[wasm_bindgen]
pub fn encrypt_xsalsa20(
    key: &[u8],
    nonce_material: &[u8],
    plaintext: &[u8],
) -> Result<Box<[u8]>, JsError> {
    let nonce = generate_nonce(nonce_material);
    let ciphertext = encrypt_xsalsa20_raw_internal(key, &nonce, plaintext)?;
    Ok(ciphertext)
}
/// WASM entry point: XSalsa20 stream decryption (no authentication).
/// - `key`: 32-byte key (must match the one used to encrypt)
/// - `nonce_material`: raw bytes hashed into the nonce (must match encryption)
/// - `ciphertext`: encrypted bytes to decrypt
/// Yields the plaintext; throws JsError on invalid key/nonce sizes.
/// No integrity protection — prefer decrypt_xsalsa20_poly1305 for
/// authenticated decryption.
#[wasm_bindgen]
pub fn decrypt_xsalsa20(
    key: &[u8],
    nonce_material: &[u8],
    ciphertext: &[u8],
) -> Result<Box<[u8]>, JsError> {
    let nonce = generate_nonce(nonce_material);
    let plaintext = decrypt_xsalsa20_raw_internal(key, &nonce, ciphertext)?;
    Ok(plaintext)
}
/// Raw XSalsa20 encryption: the caller supplies the nonce directly.
///
/// - `key`: exactly 32 bytes, else `InvalidKeyLength`
/// - `nonce`: exactly 24 bytes, else `InvalidNonceLength`
///
/// Returns the ciphertext (same length as `plaintext`); `CipherError` if the
/// cipher cannot be constructed.
pub fn encrypt_xsalsa20_raw_internal(
    key: &[u8],
    nonce: &[u8],
    plaintext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
    let key_arr = <[u8; 32]>::try_from(key)
        .map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
    let nonce_arr =
        <[u8; 24]>::try_from(nonce).map_err(|_| CryptoError::InvalidNonceLength)?;
    let mut cipher = match XSalsa20::new_from_slices(&key_arr, &nonce_arr) {
        Ok(c) => c,
        Err(_) => return Err(CryptoError::CipherError),
    };
    // Stream cipher: XOR the keystream over a copy of the input, in place.
    let mut data = plaintext.to_vec();
    cipher.apply_keystream(&mut data);
    Ok(data.into_boxed_slice())
}
/// Raw XSalsa20 decryption: the caller supplies the nonce directly.
///
/// - `key`: exactly 32 bytes, else `InvalidKeyLength`
/// - `nonce`: exactly 24 bytes, else `InvalidNonceLength`
///
/// XSalsa20 is symmetric, so decryption applies the same keystream as
/// encryption. Returns `CipherError` if the cipher cannot be constructed.
pub fn decrypt_xsalsa20_raw_internal(
    key: &[u8],
    nonce: &[u8],
    ciphertext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
    let key_arr = <[u8; 32]>::try_from(key)
        .map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
    let nonce_arr =
        <[u8; 24]>::try_from(nonce).map_err(|_| CryptoError::InvalidNonceLength)?;
    let mut cipher = match XSalsa20::new_from_slices(&key_arr, &nonce_arr) {
        Ok(c) => c,
        Err(_) => return Err(CryptoError::CipherError),
    };
    let mut data = ciphertext.to_vec();
    cipher.apply_keystream(&mut data);
    Ok(data.into_boxed_slice())
}
/// XSalsa20-Poly1305 authenticated encryption.
///
/// - `key`: exactly 32 bytes, else `InvalidKeyLength`
/// - `nonce`: exactly 24 bytes, else `InvalidNonceLength`
/// - `plaintext`: bytes to encrypt
///
/// Returns ciphertext with the Poly1305 tag appended (output is longer than
/// the input), or a `CryptoError` on failure.
pub fn encrypt_xsalsa20_poly1305(
    key: &[u8],
    nonce: &[u8],
    plaintext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
    // Key must be 32 bytes
    let key_bytes: [u8; 32] = key
        .try_into()
        .map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
    // Nonce must be 24 bytes
    let nonce_bytes: [u8; 24] = nonce
        .try_into()
        .map_err(|_| CryptoError::InvalidNonceLength)?;
    // Create cipher instance
    let cipher = XSalsa20Poly1305::new(&key_bytes.into());
    // Encrypt the plaintext.
    // Bug fix: a failure here cannot be a tag mismatch — `WrongTag` is an
    // authentication (decryption) error. Report a cipher failure instead.
    cipher
        .encrypt(&nonce_bytes.into(), plaintext)
        .map(|v| v.into_boxed_slice())
        .map_err(|_| CryptoError::CipherError)
}
/// XSalsa20-Poly1305 authenticated decryption.
///
/// - `key`: exactly 32 bytes, else `InvalidKeyLength`
/// - `nonce`: exactly 24 bytes, else `InvalidNonceLength`
/// - `ciphertext`: ciphertext with the Poly1305 tag appended
///
/// Returns the plaintext, or `WrongTag` when authentication fails (wrong
/// key, wrong nonce, or tampered ciphertext).
pub fn decrypt_xsalsa20_poly1305(
    key: &[u8],
    nonce: &[u8],
    ciphertext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
    let key_bytes = <[u8; 32]>::try_from(key)
        .map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
    let nonce_bytes =
        <[u8; 24]>::try_from(nonce).map_err(|_| CryptoError::InvalidNonceLength)?;
    let cipher = XSalsa20Poly1305::new(&key_bytes.into());
    match cipher.decrypt(&nonce_bytes.into(), ciphertext) {
        Ok(plain) => Ok(plain.into_boxed_slice()),
        // Covers both forged tags and corrupted ciphertext.
        Err(_) => Err(CryptoError::WrongTag),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Round-trip and input-validation checks for raw (unauthenticated) XSalsa20.
    #[test]
    fn test_xsalsa20() {
        // Test vectors
        let key = [0u8; 32]; // All zeros key
        let nonce = [0u8; 24]; // All zeros nonce
        let plaintext = b"Hello, World!";
        // Test encryption
        let ciphertext = encrypt_xsalsa20_raw_internal(&key, &nonce, plaintext).unwrap();
        assert_ne!(&*ciphertext, plaintext); // Ciphertext should be different from plaintext
        // Test decryption
        let decrypted = decrypt_xsalsa20_raw_internal(&key, &nonce, &ciphertext).unwrap();
        assert_eq!(&*decrypted, plaintext);
        // Test that different nonce produces different ciphertext
        let nonce2 = [1u8; 24];
        let ciphertext2 = encrypt_xsalsa20_raw_internal(&key, &nonce2, plaintext).unwrap();
        assert_ne!(ciphertext, ciphertext2);
        // Test that different key produces different ciphertext
        let key2 = [1u8; 32];
        let ciphertext3 = encrypt_xsalsa20_raw_internal(&key2, &nonce, plaintext).unwrap();
        assert_ne!(ciphertext, ciphertext3);
        // Test invalid key length
        assert!(encrypt_xsalsa20_raw_internal(&key[..31], &nonce, plaintext).is_err());
        assert!(decrypt_xsalsa20_raw_internal(&key[..31], &nonce, &ciphertext).is_err());
        // Test invalid nonce length
        assert!(encrypt_xsalsa20_raw_internal(&key, &nonce[..23], plaintext).is_err());
        assert!(decrypt_xsalsa20_raw_internal(&key, &nonce[..23], &ciphertext).is_err());
    }

    /// Exhaustive key/nonce length validation on both encrypt and decrypt paths.
    #[test]
    fn test_xsalsa20_error_handling() {
        let key = [0u8; 32];
        let nonce = [0u8; 24];
        let plaintext = b"test message";
        // Test encryption with invalid key length
        let invalid_key = vec![0u8; 31]; // Too short
        let result = encrypt_xsalsa20_raw_internal(&invalid_key, &nonce, plaintext);
        assert!(result.is_err());
        // Test with too long key
        let too_long_key = vec![0u8; 33]; // Too long
        let result = encrypt_xsalsa20_raw_internal(&too_long_key, &nonce, plaintext);
        assert!(result.is_err());
        // Test decryption with invalid key length
        let ciphertext = encrypt_xsalsa20_raw_internal(&key, &nonce, plaintext).unwrap();
        let result = decrypt_xsalsa20_raw_internal(&invalid_key, &nonce, &ciphertext);
        assert!(result.is_err());
        // Test decryption with too long key
        let result = decrypt_xsalsa20_raw_internal(&too_long_key, &nonce, &ciphertext);
        assert!(result.is_err());
        // Test with invalid nonce length
        let invalid_nonce = vec![0u8; 23]; // Too short
        let result = encrypt_xsalsa20_raw_internal(&key, &invalid_nonce, plaintext);
        assert!(result.is_err());
        let result = decrypt_xsalsa20_raw_internal(&key, &invalid_nonce, &ciphertext);
        assert!(result.is_err());
        // Test with too long nonce
        let too_long_nonce = vec![0u8; 25]; // Too long
        let result = encrypt_xsalsa20_raw_internal(&key, &too_long_nonce, plaintext);
        assert!(result.is_err());
        let result = decrypt_xsalsa20_raw_internal(&key, &too_long_nonce, &ciphertext);
        assert!(result.is_err());
    }

    /// Round-trip, tamper-detection and wrong-key/wrong-nonce checks for the
    /// authenticated XSalsa20-Poly1305 functions.
    #[test]
    fn test_xsalsa20_poly1305() {
        let key = [0u8; 32]; // All zeros key
        let nonce = [0u8; 24]; // All zeros nonce
        let plaintext = b"Hello, World!";
        // Test encryption
        let ciphertext = encrypt_xsalsa20_poly1305(&key, &nonce, plaintext).unwrap();
        assert!(ciphertext.len() > plaintext.len()); // Should include authentication tag
        // Test decryption
        let decrypted = decrypt_xsalsa20_poly1305(&key, &nonce, &ciphertext).unwrap();
        assert_eq!(&*decrypted, plaintext);
        // Test that different nonce produces different ciphertext
        let nonce2 = [1u8; 24];
        let ciphertext2 = encrypt_xsalsa20_poly1305(&key, &nonce2, plaintext).unwrap();
        assert_ne!(ciphertext, ciphertext2);
        // Test that different key produces different ciphertext
        let key2 = [1u8; 32];
        let ciphertext3 = encrypt_xsalsa20_poly1305(&key2, &nonce, plaintext).unwrap();
        assert_ne!(ciphertext, ciphertext3);
        // Test that decryption fails with wrong key
        assert!(decrypt_xsalsa20_poly1305(&key2, &nonce, &ciphertext).is_err());
        // Test that decryption fails with wrong nonce
        assert!(decrypt_xsalsa20_poly1305(&key, &nonce2, &ciphertext).is_err());
        // Test that decryption fails with tampered ciphertext
        let mut tampered = ciphertext.clone();
        tampered[0] ^= 1;
        assert!(decrypt_xsalsa20_poly1305(&key, &nonce, &tampered).is_err());
    }
}

View File

@@ -1,43 +0,0 @@
use std::fmt;
/// Error type shared by the crypto primitives in this crate.
#[derive(Debug)]
pub enum CryptoError {
    /// Key had the wrong byte length: (expected, actual).
    InvalidKeyLength(usize, usize),
    /// Nonce was not the expected length (24 bytes for XSalsa20 variants).
    InvalidNonceLength,
    /// Sealer secret string did not start with "sealerSecret_z".
    InvalidSealerSecretFormat,
    /// Signature bytes had the wrong length.
    InvalidSignatureLength,
    /// Ed25519 verifying key could not be parsed; carries the parser message.
    InvalidVerifyingKey(String),
    /// Public key could not be parsed; carries the parser message.
    InvalidPublicKey(String),
    /// Authentication tag did not verify (wrong key/nonce or tampered data).
    WrongTag,
    /// Cipher instance could not be constructed.
    CipherError,
    /// Input string lacked the required prefix: (expected prefix, field name).
    InvalidPrefix(&'static str, &'static str),
    /// Base58 decoding failed; carries the decoder message.
    Base58Error(String),
}
/// Human-readable messages for each error variant.
impl fmt::Display for CryptoError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            CryptoError::InvalidKeyLength(expected, actual) => {
                write!(f, "Invalid key length (expected {expected}, got {actual})")
            }
            CryptoError::InvalidNonceLength => f.write_str("Invalid nonce length"),
            CryptoError::InvalidSealerSecretFormat => f.write_str(
                "Invalid sealer secret format: must start with 'sealerSecret_z'",
            ),
            CryptoError::InvalidSignatureLength => f.write_str("Invalid signature length"),
            CryptoError::InvalidVerifyingKey(e) => write!(f, "Invalid verifying key: {}", e),
            CryptoError::InvalidPublicKey(e) => write!(f, "Invalid public key: {}", e),
            CryptoError::WrongTag => f.write_str("Wrong tag"),
            CryptoError::CipherError => f.write_str("Failed to create cipher"),
            CryptoError::InvalidPrefix(prefix, field) => {
                write!(f, "Invalid {} format: must start with '{}'", field, prefix)
            }
            CryptoError::Base58Error(e) => write!(f, "Invalid base58: {}", e),
        }
    }
}
impl std::error::Error for CryptoError {}

View File

@@ -1,218 +0,0 @@
use wasm_bindgen::prelude::*;
/// Derive a 24-byte nonce from arbitrary input material using BLAKE3.
///
/// - `nonce_material`: bytes to derive the nonce from
///
/// Deterministic: identical input always yields the identical nonce (the
/// first 24 bytes of the BLAKE3 digest of the material).
#[wasm_bindgen]
pub fn generate_nonce(nonce_material: &[u8]) -> Box<[u8]> {
    let digest = blake3::hash(nonce_material);
    digest.as_bytes()[..24].into()
}
/// Hash a single buffer with BLAKE3.
///
/// - `data`: bytes to hash
///
/// Returns the 32-byte digest. Convenience wrapper for one-shot hashing.
#[wasm_bindgen]
pub fn blake3_hash_once(data: &[u8]) -> Box<[u8]> {
    let digest: [u8; 32] = *blake3::hash(data).as_bytes();
    digest.into()
}
/// Hash a buffer with BLAKE3, prefixed by a context value.
///
/// - `data`: bytes to hash
/// - `context`: bytes hashed before `data` for domain separation
///
/// Returns the 32-byte digest; the same data under different contexts
/// produces different digests.
#[wasm_bindgen]
pub fn blake3_hash_once_with_context(data: &[u8], context: &[u8]) -> Box<[u8]> {
    let mut hasher = blake3::Hasher::new();
    // Context first, then payload — order defines the domain separation.
    hasher.update(context).update(data);
    let digest: [u8; 32] = *hasher.finalize().as_bytes();
    digest.into()
}
/// Incremental BLAKE3 hasher exposed to JavaScript.
#[wasm_bindgen]
pub struct Blake3Hasher(blake3::Hasher);
#[wasm_bindgen]
impl Blake3Hasher {
    /// Create a fresh (empty) hasher state.
    #[wasm_bindgen(constructor)]
    pub fn new() -> Self {
        Blake3Hasher(blake3::Hasher::new())
    }
    /// Feed more bytes into the running hash.
    pub fn update(&mut self, data: &[u8]) {
        self.0.update(data);
    }
    /// Return the 32-byte digest of everything fed so far.
    /// Non-consuming: the state can keep receiving updates afterwards.
    pub fn finalize(&self) -> Box<[u8]> {
        self.0.finalize().as_bytes().to_vec().into_boxed_slice()
    }
    /// Deep-copy the hasher state (inherent method so it is exported to JS;
    /// it shadows `Clone::clone` on owned values by design).
    pub fn clone(&self) -> Self {
        // The blake3::Hasher type implements Clone
        Blake3Hasher(self.0.clone())
    }
}
/// Get an empty BLAKE3 state for incremental hashing.
/// Returns a new Blake3Hasher instance for incremental hashing.
#[wasm_bindgen]
pub fn blake3_empty_state() -> Blake3Hasher {
Blake3Hasher::new()
}
/// Feed `data` into an existing incremental BLAKE3 state.
///
/// - `state`: hasher to update in place
/// - `data`: bytes to absorb
#[wasm_bindgen]
pub fn blake3_update_state(state: &mut Blake3Hasher, data: &[u8]) {
    state.0.update(data);
}
/// Finalize an incremental BLAKE3 state, consuming it.
///
/// - `state`: the hasher to finalize
///
/// Returns the 32-byte digest of all data absorbed by the state.
#[wasm_bindgen]
pub fn blake3_digest_for_state(state: Blake3Hasher) -> Box<[u8]> {
    state.0.finalize().as_bytes().to_vec().into_boxed_slice()
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Nonces are 24 bytes, deterministic, and input-dependent.
    #[test]
    fn test_nonce_generation() {
        let input = b"test input";
        let nonce = generate_nonce(input);
        assert_eq!(nonce.len(), 24);
        // Same input should produce same nonce
        let nonce2 = generate_nonce(input);
        assert_eq!(nonce, nonce2);
        // Different input should produce different nonce
        let nonce3 = generate_nonce(b"different input");
        assert_ne!(nonce, nonce3);
    }

    /// One-shot hashing: 32-byte output, deterministic, input-dependent.
    #[test]
    fn test_blake3_hash_once() {
        let input = b"test input";
        let hash = blake3_hash_once(input);
        // BLAKE3 produces 32-byte hashes
        assert_eq!(hash.len(), 32);
        // Same input should produce same hash
        let hash2 = blake3_hash_once(input);
        assert_eq!(hash, hash2);
        // Different input should produce different hash
        let hash3 = blake3_hash_once(b"different input");
        assert_ne!(hash, hash3);
    }

    /// Context-prefixed hashing provides domain separation.
    #[test]
    fn test_blake3_hash_once_with_context() {
        let input = b"test input";
        let context = b"test context";
        let hash = blake3_hash_once_with_context(input, context);
        // BLAKE3 produces 32-byte hashes
        assert_eq!(hash.len(), 32);
        // Same input and context should produce same hash
        let hash2 = blake3_hash_once_with_context(input, context);
        assert_eq!(hash, hash2);
        // Different input should produce different hash
        let hash3 = blake3_hash_once_with_context(b"different input", context);
        assert_ne!(hash, hash3);
        // Different context should produce different hash
        let hash4 = blake3_hash_once_with_context(input, b"different context");
        assert_ne!(hash, hash4);
        // Hash with context should be different from hash without context
        let hash_no_context = blake3_hash_once(input);
        assert_ne!(hash, hash_no_context);
    }

    /// Incremental hashing equals one-shot hashing over the concatenated
    /// input, and matches hard-coded vectors shared with the TypeScript suite.
    #[test]
    fn test_blake3_incremental() {
        // Initial state
        let mut state = blake3_empty_state();
        // First update with [1,2,3,4,5]
        let data1 = &[1u8, 2, 3, 4, 5];
        blake3_update_state(&mut state, data1);
        // Check that this matches a direct hash
        let direct_hash = blake3_hash_once(data1);
        let state_hash = state.finalize();
        assert_eq!(
            state_hash, direct_hash,
            "First update should match direct hash"
        );
        // Create new state for second test
        let mut state = blake3_empty_state();
        blake3_update_state(&mut state, data1);
        // Verify the exact expected hash from the TypeScript test for the first update
        let expected_first_hash = [
            2, 79, 103, 192, 66, 90, 61, 192, 47, 186, 245, 140, 185, 61, 229, 19, 46, 61, 117,
            197, 25, 250, 160, 186, 218, 33, 73, 29, 136, 201, 112, 87,
        ]
        .to_vec()
        .into_boxed_slice();
        assert_eq!(
            state.finalize(),
            expected_first_hash,
            "First update should match expected hash"
        );
        // Test with two updates
        let mut state = blake3_empty_state();
        let data1 = &[1u8, 2, 3, 4, 5];
        let data2 = &[6u8, 7, 8, 9, 10];
        blake3_update_state(&mut state, data1);
        blake3_update_state(&mut state, data2);
        // Compare with a single hash of all data
        let mut all_data = Vec::new();
        all_data.extend_from_slice(data1);
        all_data.extend_from_slice(data2);
        let direct_hash_all = blake3_hash_once(&all_data);
        assert_eq!(
            state.finalize(),
            direct_hash_all,
            "Final state should match direct hash of all data"
        );
        // Test final hash matches expected value
        let mut state = blake3_empty_state();
        blake3_update_state(&mut state, data1);
        blake3_update_state(&mut state, data2);
        let expected_final_hash = [
            165, 131, 141, 69, 2, 69, 39, 236, 196, 244, 180, 213, 147, 124, 222, 39, 68, 223, 54,
            176, 242, 97, 200, 101, 204, 79, 21, 233, 56, 51, 1, 199,
        ]
        .to_vec()
        .into_boxed_slice();
        assert_eq!(
            state.finalize(),
            expected_final_hash,
            "Final state should match expected hash"
        );
    }
}

View File

@@ -1,165 +0,0 @@
use cojson_core::{
CoID, CoJsonCoreError, KeyID, KeySecret, SessionID, SessionLogInternal, Signature, SignerID, SignerSecret, TransactionMode
};
use serde_json::value::RawValue;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use wasm_bindgen::prelude::*;
mod error;
pub use error::CryptoError;
pub mod hash {
pub mod blake3;
pub use blake3::*;
}
pub mod crypto {
pub mod ed25519;
pub mod encrypt;
pub mod seal;
pub mod sign;
pub mod x25519;
pub mod xsalsa20;
pub use ed25519::*;
pub use encrypt::*;
pub use seal::*;
pub use sign::*;
pub use x25519::*;
pub use xsalsa20::*;
}
/// Error type for the WASM bindings; wraps core, serde and JS-side failures.
#[derive(Error, Debug)]
pub enum CojsonCoreWasmError {
    /// Error bubbled up from the cojson-core session-log logic.
    #[error(transparent)]
    CoJson(#[from] CoJsonCoreError),
    /// JSON (de)serialization failure.
    #[error(transparent)]
    Serde(#[from] serde_json::Error),
    /// serde-wasm-bindgen conversion failure.
    #[error(transparent)]
    SerdeWasmBindgen(#[from] serde_wasm_bindgen::Error),
    /// Raw JS value carried as an error (formatted via Debug).
    #[error("JsValue Error: {0:?}")]
    Js(JsValue),
}
/// Surface errors to JavaScript as plain string values.
impl From<CojsonCoreWasmError> for JsValue {
    fn from(err: CojsonCoreWasmError) -> Self {
        let message = err.to_string();
        JsValue::from_str(&message)
    }
}
/// JavaScript-facing handle around a `SessionLogInternal`.
#[wasm_bindgen]
#[derive(Clone)]
pub struct SessionLog {
    // The actual log state; all methods delegate to it.
    internal: SessionLogInternal,
}
/// JSON payload returned by `add_new_private_transaction`:
/// the new head signature plus the "encrypted_U…" changes string.
#[derive(Serialize, Deserialize)]
struct PrivateTransactionResult {
    signature: String,
    encrypted_changes: String,
}
#[wasm_bindgen]
impl SessionLog {
    /// Create a new, empty session log.
    ///
    /// `signer_id` is optional; without it, `tryAdd` cannot verify signatures
    /// (see `SessionLogInternal::try_add`).
    #[wasm_bindgen(constructor)]
    pub fn new(co_id: String, session_id: String, signer_id: Option<String>) -> SessionLog {
        let co_id = CoID(co_id);
        let session_id = SessionID(session_id);
        let signer_id = signer_id.map(|id| SignerID(id));
        let internal = SessionLogInternal::new(co_id, session_id, signer_id);
        SessionLog { internal }
    }
    /// Deep-copy the log; exported to JS as `clone`.
    #[wasm_bindgen(js_name = clone)]
    pub fn clone_js(&self) -> SessionLog {
        self.clone()
    }
    /// Append externally-produced transactions (each element is one
    /// transaction's JSON). `new_signature_str` must sign the resulting head
    /// hash unless `skip_verify` is set.
    #[wasm_bindgen(js_name = tryAdd)]
    pub fn try_add(
        &mut self,
        transactions_json: Vec<String>,
        new_signature_str: String,
        skip_verify: bool,
    ) -> Result<(), CojsonCoreWasmError> {
        // Parse each string into a RawValue so the exact bytes are preserved
        // for hashing; fail fast on the first malformed entry.
        let transactions: Vec<Box<RawValue>> = transactions_json
            .into_iter()
            .map(|s| {
                serde_json::from_str(&s).map_err(|e| {
                    CojsonCoreWasmError::Js(JsValue::from(format!(
                        "Failed to parse transaction string: {}",
                        e
                    )))
                })
            })
            .collect::<Result<Vec<_>, _>>()?;
        let new_signature = Signature(new_signature_str);
        self.internal
            .try_add(transactions, &new_signature, skip_verify)?;
        Ok(())
    }
    /// Encrypt `changes_json`, append it as a private transaction, and return
    /// a JSON object with the new `signature` and the `encrypted_changes`.
    #[wasm_bindgen(js_name = addNewPrivateTransaction)]
    pub fn add_new_private_transaction(
        &mut self,
        changes_json: &str,
        signer_secret: String,
        encryption_key: String,
        key_id: String,
        made_at: f64,
    ) -> Result<String, CojsonCoreWasmError> {
        let (signature, transaction) = self.internal.add_new_transaction(
            changes_json,
            TransactionMode::Private{key_id: KeyID(key_id), key_secret: KeySecret(encryption_key)},
            &SignerSecret(signer_secret),
            made_at as u64,
        );
        // Extract encrypted_changes from the private transaction
        let encrypted_changes = match transaction {
            cojson_core::Transaction::Private(private_tx) => private_tx.encrypted_changes.value,
            _ => return Err(CojsonCoreWasmError::Js(JsValue::from_str("Expected private transaction"))),
        };
        let result = PrivateTransactionResult{
            signature: signature.0,
            encrypted_changes,
        };
        Ok(serde_json::to_string(&result)?)
    }
    /// Append a plaintext ("trusting") transaction; returns the new head
    /// signature string.
    #[wasm_bindgen(js_name = addNewTrustingTransaction)]
    pub fn add_new_trusting_transaction(
        &mut self,
        changes_json: &str,
        signer_secret: String,
        made_at: f64,
    ) -> Result<String, CojsonCoreWasmError> {
        let (signature, _) = self.internal.add_new_transaction(
            changes_json,
            TransactionMode::Trusting,
            &SignerSecret(signer_secret),
            made_at as u64,
        );
        Ok(signature.0)
    }
    /// Return the changes JSON of the transaction at `tx_index`, decrypting
    /// with `encryption_key` when the transaction is private.
    #[wasm_bindgen(js_name = decryptNextTransactionChangesJson)]
    pub fn decrypt_next_transaction_changes_json(
        &self,
        tx_index: u32,
        encryption_key: String,
    ) -> Result<String, CojsonCoreWasmError> {
        Ok(self
            .internal
            .decrypt_next_transaction_changes_json(tx_index, KeySecret(encryption_key))?)
    }
}

View File

@@ -1,18 +0,0 @@
[package]
name = "cojson-core"
version = "0.1.0"
edition = "2021"
[dependencies]
lzy = { path = "../lzy", optional = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["raw_value"] }
ed25519-dalek = { version = "2.2.0", features = ["rand_core"] }
bs58 = "0.5.1"
blake3 = "1.5.1"
salsa20 = "0.10.2"
base64 = "0.22.1"
thiserror = "1.0"
[dev-dependencies]
rand_core = { version = "0.6", features = ["getrandom"] }

View File

@@ -1,8 +0,0 @@
{
"coID": "co_zUsz4gkwCCWqMXa4LHXdwyAkVK3",
"signerID":"signer_z3FdM2ucYXUkbJQgPRf8R4Di6exd2sNPVaHaJHhQ8WAqi",
"knownKeys":[],
"exampleBase": {
"co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR":{"transactions":[{"changes":"[{\"key\":\"co_zkNajJ1BhLzR962jpzvXxx917ZB\",\"op\":\"set\",\"value\":\"admin\"}]","madeAt":1750685354142,"privacy":"trusting"},{"changes":"[{\"key\":\"key_z268nqpkZYFFWPoGzL_for_co_zkNajJ1BhLzR962jpzvXxx917ZB\",\"op\":\"set\",\"value\":\"sealed_UmZaEEzCUrP3Q-t2KrN00keV66wzA4LWadqhEmw0jlku5frSW2QyXUY3zYIC_XLig6BDS9rcZZdTm3CwnLjTPzp9hgd9TlJLf_Q==\"}]","madeAt":1750685354142,"privacy":"trusting"},{"changes":"[{\"key\":\"readKey\",\"op\":\"set\",\"value\":\"key_z268nqpkZYFFWPoGzL\"}]","madeAt":1750685354143,"privacy":"trusting"},{"changes":"[{\"key\":\"everyone\",\"op\":\"set\",\"value\":\"writer\"}]","madeAt":1750685354143,"privacy":"trusting"},{"changes":"[{\"key\":\"key_z268nqpkZYFFWPoGzL_for_everyone\",\"op\":\"set\",\"value\":\"keySecret_zHRFDaEsnpYSZh6rUAvXS8uUrKCxJAzeBPSSaVU1r9RZY\"}]","madeAt":1750685354143,"privacy":"trusting"}],"lastHash":"hash_z5j1DUZjBiTKm5XnLi8ZrNPV3P7zGuXnMNCZfh2qGXGC7","streamingHash":{"state":{"__wbg_ptr":1127736},"crypto":{}},"lastSignature":"signature_z4LoRVDLnJBfAzHvRn3avgK4RVBd7iAfqUMJdpDEtV8HGLKGAqLyweBkNp8jggcNUQZatrMeU9tdc31ct9qxw7rib","signatureAfter":{}}
}
}

View File

@@ -1,6 +0,0 @@
{
"coID": "co_zWnX74VrMP3n3dkm9wZVPszfiCw",
"signerID":"signer_z3FdM2ucYXUkbJQgPRf8R4Di6exd2sNPVaHaJHhQ8WAqi",
"knownKeys":[{"secret":"keySecret_zHRFDaEsnpYSZh6rUAvXS8uUrKCxJAzeBPSSaVU1r9RZY","id":"key_z268nqpkZYFFWPoGzL"}],
"exampleBase":{"co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR":{"transactions":[{"encryptedChanges":"encrypted_UxN_r7X7p-3GUE3GRGRO4NfIhEUvB01m-HaSSipRRrUsTmNBW9dZ-pkAk-NoVP_iEB0moLFbG9GDq9U9S-rUDfSPcaWCJtpE=","keyUsed":"key_z268nqpkZYFFWPoGzL","madeAt":1750685368555,"privacy":"private"}],"lastHash":"hash_zJCdoTRgDuFdUK2XogR7qgNnxezfYAVih3qve2UV65L5X","streamingHash":{"state":{"__wbg_ptr":1129680},"crypto":{}},"lastSignature":"signature_z3UErpugJAqDEYKgzUhs88xBMohzmaL228PgkNhEomf6AeVr7NYNxY17iUoCmPQTpGJNqYPo3y82mGX4oWBhkqN4y","signatureAfter":{}}}
}

View File

@@ -1,689 +0,0 @@
use base64::{engine::general_purpose::URL_SAFE, Engine as _};
use bs58;
use ed25519_dalek::{Signature as Ed25519Signature, Signer, SigningKey, Verifier, VerifyingKey};
use salsa20::{
cipher::{KeyIvInit, StreamCipher},
XSalsa20,
};
use serde::{Deserialize, Serialize};
use serde_json::{value::RawValue, Number, Value as JsonValue};
use thiserror::Error;
// Re-export lzy for convenience
#[cfg(feature = "lzy")]
pub use lzy;
/// Opaque session identifier string (e.g. "<co_id>_session_…").
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct SessionID(pub String);
/// Base58 ("signer_z…") encoding of an Ed25519 verifying (public) key.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct SignerID(pub String);
impl From<VerifyingKey> for SignerID {
    fn from(key: VerifyingKey) -> Self {
        SignerID(format!(
            "signer_z{}",
            bs58::encode(key.to_bytes()).into_string()
        ))
    }
}
/// Base58 ("signerSecret_z…") encoding of an Ed25519 signing (private) key.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct SignerSecret(pub String);
impl From<SigningKey> for SignerSecret {
    fn from(key: SigningKey) -> Self {
        SignerSecret(format!(
            "signerSecret_z{}",
            bs58::encode(key.to_bytes()).into_string()
        ))
    }
}
/// Implemented as `From` rather than `Into` (per `clippy::from_over_into`);
/// existing `.into()` call sites keep working through the blanket impl.
impl From<&SignerSecret> for SigningKey {
    /// Panics if the secret is not a valid base58 "…_z"-prefixed 32-byte key.
    fn from(secret: &SignerSecret) -> Self {
        let key_bytes = decode_z(&secret.0).expect("Invalid key secret");
        SigningKey::from_bytes(&key_bytes.try_into().expect("Invalid key secret length"))
    }
}
/// Base58 ("signature_z…") encoding of an Ed25519 signature.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Signature(pub String);
impl From<Ed25519Signature> for Signature {
    fn from(signature: Ed25519Signature) -> Self {
        Signature(format!(
            "signature_z{}",
            bs58::encode(signature.to_bytes()).into_string()
        ))
    }
}
/// Implemented as `From` rather than `Into` (per `clippy::from_over_into`);
/// existing `.into()` call sites keep working through the blanket impl.
impl From<&Signature> for Ed25519Signature {
    /// Panics if the string is not valid base58 or not a 64-byte signature.
    fn from(signature: &Signature) -> Self {
        let signature_bytes = decode_z(&signature.0).expect("Invalid signature");
        Ed25519Signature::from_bytes(
            &signature_bytes
                .try_into()
                .expect("Invalid signature length"),
        )
    }
}
/// Base58 ("hash_z…") encoding of a 32-byte BLAKE3 hash.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Hash(pub String);
impl From<blake3::Hash> for Hash {
    fn from(hash: blake3::Hash) -> Self {
        Hash(format!("hash_z{}", bs58::encode(hash.as_bytes()).into_string()))
    }
}
/// Identifier string ("key_z…") naming a symmetric key.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct KeyID(pub String);
/// Base58 ("keySecret_z…") encoding of a 32-byte symmetric key.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct KeySecret(pub String);
/// Implemented as `From` rather than `Into` (per `clippy::from_over_into`);
/// existing `.into()` call sites keep working through the blanket impl.
impl From<&KeySecret> for [u8; 32] {
    /// Panics if the secret is not valid base58 or not exactly 32 bytes.
    fn from(secret: &KeySecret) -> Self {
        let key_bytes = decode_z(&secret.0).expect("Invalid key secret");
        key_bytes.try_into().expect("Invalid key secret length")
    }
}
/// Identifier string ("co_z…") naming a collaborative value.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct CoID(pub String);
/// Position of a transaction: which session it belongs to and its index
/// within that session. Serialized with camelCase field names.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct TransactionID {
    #[serde(rename = "sessionID")]
    pub session_id: SessionID,
    #[serde(rename = "txIndex")]
    pub tx_index: u32,
}
/// Encrypted payload, serialized as a bare string ("encrypted_U…").
/// The type parameter records the plaintext type; only `value` is stored
/// (`PhantomData` is skipped by `#[serde(transparent)]`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Encrypted<T> {
    pub value: String,
    _phantom: std::marker::PhantomData<T>,
}
/// A transaction whose changes are encrypted; `privacy` is "private" in
/// practice (see `add_new_transaction`). camelCase field names on the wire.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PrivateTransaction {
    #[serde(rename = "encryptedChanges")]
    pub encrypted_changes: Encrypted<JsonValue>,
    #[serde(rename = "keyUsed")]
    pub key_used: KeyID,
    #[serde(rename = "madeAt")]
    pub made_at: Number,
    pub privacy: String,
}
/// A transaction whose changes are stored as plaintext JSON; `privacy` is
/// "trusting" in practice (see `add_new_transaction`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct TrustingTransaction {
    pub changes: String,
    #[serde(rename = "madeAt")]
    pub made_at: Number,
    pub privacy: String,
}
/// Either kind of transaction. `untagged`: serde picks the variant by field
/// shape (private variants carry "encryptedChanges", trusting "changes").
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Transaction {
    Private(PrivateTransaction),
    Trusting(TrustingTransaction),
}
/// How a new transaction should be created: encrypted with a named key,
/// or stored as plaintext.
pub enum TransactionMode {
    Private {
        key_id: KeyID,
        key_secret: KeySecret,
    },
    Trusting,
}
/// Errors produced by session-log operations.
#[derive(Error, Debug)]
pub enum CoJsonCoreError {
    /// No transaction exists at the requested index.
    #[error("Transaction not found at index {0}")]
    TransactionNotFound(u32),
    /// Encrypted payload did not start with the "encrypted_U" prefix.
    #[error("Invalid encrypted prefix in transaction")]
    InvalidEncryptedPrefix,
    #[error("Base64 decoding failed")]
    Base64Decode(#[from] base64::DecodeError),
    #[error("UTF-8 conversion failed")]
    Utf8(#[from] std::string::FromUtf8Error),
    #[error("JSON deserialization failed")]
    Json(#[from] serde_json::Error),
    /// Carries the (unquoted) "hash_z…" string whose signature check failed.
    #[error("Signature verification failed: (hash: {0})")]
    SignatureVerification(String),
}
/// Append-only, hash-chained log of transactions for one (CoID, SessionID).
#[derive(Clone)]
pub struct SessionLogInternal {
    co_id: CoID,
    session_id: SessionID,
    // Verifying key decoded from the signer ID; `None` means try_add can
    // never verify a signature (it then only succeeds with skip_verify).
    public_key: Option<VerifyingKey>,
    // Running BLAKE3 over the raw JSON of every transaction appended so far.
    hasher: blake3::Hasher,
    // Raw transaction JSON strings, in append order.
    transactions_json: Vec<String>,
    // Signature over the current head hash, if any transaction was added.
    last_signature: Option<Signature>,
}
impl SessionLogInternal {
    /// Create an empty session log for `co_id`/`session_id`.
    ///
    /// `signer_id`, when given, is decoded into an Ed25519 verifying key up
    /// front; a malformed ID panics ("Invalid public key"). With `None`,
    /// signature verification in `try_add` always fails.
    pub fn new(co_id: CoID, session_id: SessionID, signer_id: Option<SignerID>) -> Self {
        let hasher = blake3::Hasher::new();
        let public_key = match signer_id {
            Some(signer_id) => Some(VerifyingKey::try_from(
                decode_z(&signer_id.0)
                    .expect("Invalid public key")
                    .as_slice(),
            )
            .expect("Invalid public key")),
            None => None,
        };
        Self {
            co_id,
            session_id,
            public_key,
            hasher,
            transactions_json: Vec::new(),
            last_signature: None,
        }
    }
    /// Raw JSON of every transaction appended so far, in order.
    pub fn transactions_json(&self) -> &Vec<String> {
        &self.transactions_json
    }
    /// Signature covering the current head of the log, if any.
    pub fn last_signature(&self) -> Option<&Signature> {
        self.last_signature.as_ref()
    }
    /// Running BLAKE3 state this log would have after appending
    /// `transactions`; the stored hasher itself is not modified.
    fn expected_hash_after(&self, transactions: &[Box<RawValue>]) -> blake3::Hasher {
        let mut hasher = self.hasher.clone();
        for tx in transactions {
            hasher.update(tx.get().as_bytes());
        }
        hasher
    }
    /// Append externally-produced transactions.
    ///
    /// Unless `skip_verify` is set, `new_signature` must be a valid signature
    /// by this log's signer over the JSON-quoted "hash_z…" string of the
    /// post-append hash; on failure `SignatureVerification` is returned and
    /// the log is left unchanged. NOTE(review): with `skip_verify` the
    /// running hash is not advanced — confirm this is intended.
    pub fn try_add(
        &mut self,
        transactions: Vec<Box<RawValue>>,
        new_signature: &Signature,
        skip_verify: bool,
    ) -> Result<(), CoJsonCoreError> {
        if !skip_verify {
            let hasher = self.expected_hash_after(&transactions);
            // The signed message is the *quoted* hash string: "\"hash_z…\"".
            let new_hash_encoded_stringified = format!(
                "\"hash_z{}\"",
                bs58::encode(hasher.finalize().as_bytes()).into_string()
            );
            if let Some(public_key) = self.public_key {
                match public_key.verify(
                    new_hash_encoded_stringified.as_bytes(),
                    &(new_signature).into(),
                ) {
                    Ok(()) => {}
                    Err(_) => {
                        return Err(CoJsonCoreError::SignatureVerification(
                            new_hash_encoded_stringified.replace("\"", ""),
                        ));
                    }
                }
            } else {
                // No signer configured: verification can never succeed.
                return Err(CoJsonCoreError::SignatureVerification(
                    new_hash_encoded_stringified.replace("\"", ""),
                ));
            }
            self.hasher = hasher;
        }
        for tx in transactions {
            self.transactions_json.push(tx.get().to_string());
        }
        self.last_signature = Some(new_signature.clone());
        Ok(())
    }
    /// Build, hash, sign and append a new transaction from `changes_json`;
    /// returns the new head signature and the transaction.
    ///
    /// In `Private` mode the changes are encrypted with XSalsa20 using a
    /// nonce derived from `{"in": <co_id>, "tx": <TransactionID>}`.
    pub fn add_new_transaction(
        &mut self,
        changes_json: &str,
        mode: TransactionMode,
        signer_secret: &SignerSecret,
        made_at: u64,
    ) -> (Signature, Transaction) {
        let new_tx = match mode {
            TransactionMode::Private { key_id, key_secret } => {
                let tx_index = self.transactions_json.len() as u32;
                let nonce_material = JsonValue::Object(serde_json::Map::from_iter(vec![
                    ("in".to_string(), JsonValue::String(self.co_id.0.clone())),
                    (
                        "tx".to_string(),
                        serde_json::to_value(TransactionID {
                            session_id: self.session_id.clone(),
                            tx_index,
                        })
                        .unwrap(),
                    ),
                ]));
                let nonce = self.generate_json_nonce(&nonce_material);
                let secret_key_bytes: [u8; 32] = (&key_secret).into();
                // Stream cipher: encrypt in place over a copy of the changes.
                let mut ciphertext = changes_json.as_bytes().to_vec();
                let mut cipher = XSalsa20::new(&secret_key_bytes.into(), &nonce.into());
                cipher.apply_keystream(&mut ciphertext);
                let encrypted_str = format!("encrypted_U{}", URL_SAFE.encode(&ciphertext));
                Transaction::Private(PrivateTransaction {
                    encrypted_changes: Encrypted {
                        value: encrypted_str,
                        _phantom: std::marker::PhantomData,
                    },
                    key_used: key_id.clone(),
                    made_at: Number::from(made_at),
                    privacy: "private".to_string(),
                })
            }
            TransactionMode::Trusting => Transaction::Trusting(TrustingTransaction {
                changes: changes_json.to_string(),
                made_at: Number::from(made_at),
                privacy: "trusting".to_string(),
            }),
        };
        let tx_json = serde_json::to_string(&new_tx).unwrap();
        self.hasher.update(tx_json.as_bytes());
        self.transactions_json.push(tx_json);
        let new_hash = self.hasher.finalize();
        // Sign the quoted "hash_z…" string — the same message try_add verifies.
        let new_hash_encoded_stringified = format!("\"hash_z{}\"", bs58::encode(new_hash.as_bytes()).into_string());
        let signing_key: SigningKey = signer_secret.into();
        let new_signature: Signature = signing_key.sign(new_hash_encoded_stringified.as_bytes()).into();
        self.last_signature = Some(new_signature.clone());
        (new_signature, new_tx)
    }
    /// Return the changes JSON of the transaction at `tx_index`, decrypting
    /// with `key_secret` when the transaction is private; trusting
    /// transactions are returned as-is.
    pub fn decrypt_next_transaction_changes_json(
        &self,
        tx_index: u32,
        key_secret: KeySecret,
    ) -> Result<String, CoJsonCoreError> {
        let tx_json = self
            .transactions_json
            .get(tx_index as usize)
            .ok_or(CoJsonCoreError::TransactionNotFound(tx_index))?;
        let tx: Transaction = serde_json::from_str(tx_json)?;
        match tx {
            Transaction::Private(private_tx) => {
                // Rebuild the exact nonce material used at encryption time.
                let nonce_material = JsonValue::Object(serde_json::Map::from_iter(vec![
                    ("in".to_string(), JsonValue::String(self.co_id.0.clone())),
                    (
                        "tx".to_string(),
                        serde_json::to_value(TransactionID {
                            session_id: self.session_id.clone(),
                            tx_index,
                        })?,
                    ),
                ]));
                let nonce = self.generate_json_nonce(&nonce_material);
                let encrypted_val = private_tx.encrypted_changes.value;
                let prefix = "encrypted_U";
                if !encrypted_val.starts_with(prefix) {
                    return Err(CoJsonCoreError::InvalidEncryptedPrefix);
                }
                let ciphertext_b64 = &encrypted_val[prefix.len()..];
                let mut ciphertext = URL_SAFE.decode(ciphertext_b64)?;
                let secret_key_bytes: [u8; 32] = (&key_secret).into();
                // XSalsa20 is symmetric: applying the keystream again decrypts.
                let mut cipher = XSalsa20::new((&secret_key_bytes).into(), &nonce.into());
                cipher.apply_keystream(&mut ciphertext);
                Ok(String::from_utf8(ciphertext)?)
            }
            Transaction::Trusting(trusting_tx) => Ok(trusting_tx.changes),
        }
    }
    /// Derive a 24-byte nonce from `material` via BLAKE3's XOF output.
    fn generate_nonce(&self, material: &[u8]) -> [u8; 24] {
        let mut hasher = blake3::Hasher::new();
        hasher.update(material);
        let mut output = [0u8; 24];
        let mut output_reader = hasher.finalize_xof();
        output_reader.fill(&mut output);
        output
    }
    /// Derive a nonce from a JSON value by hashing its serialized form.
    fn generate_json_nonce(&self, material: &JsonValue) -> [u8; 24] {
        let stable_json = serde_json::to_string(&material).unwrap();
        self.generate_nonce(stable_json.as_bytes())
    }
}
/// Decode a "…_z<base58>"-style string into raw bytes.
///
/// Everything up to and including the first "_z" is treated as the prefix;
/// the remainder is base58-decoded. Errors are returned as plain strings.
pub fn decode_z(value: &str) -> Result<Vec<u8>, String> {
    let marker = value.find("_z").ok_or("Invalid prefix")?;
    let encoded = &value[marker + 2..];
    bs58::decode(encoded).into_vec().map_err(|e| e.to_string())
}
#[cfg(test)]
mod tests {
use super::*;
use rand_core::OsRng;
use std::{collections::HashMap, fs};
/// A freshly constructed log has no head signature.
#[test]
fn it_works() {
    let mut csprng = OsRng;
    let signing_key = SigningKey::generate(&mut csprng);
    let verifying_key = signing_key.verifying_key();
    let session = SessionLogInternal::new(
        CoID("co_test1".to_string()),
        SessionID("session_test1".to_string()),
        // Fix: `new` takes Option<SignerID>; VerifyingKey converts to
        // SignerID but not directly to Option<SignerID>, so wrap explicitly.
        Some(verifying_key.into()),
    );
    assert!(session.last_signature.is_none());
}
/// Replays a captured single-transaction session and checks that `try_add`
/// verifies the recorded signature and reproduces the recorded head hash.
#[test]
fn test_add_from_example_json() {
    #[derive(Deserialize, Debug)]
    #[serde(rename_all = "camelCase")]
    struct TestSession<'a> {
        last_signature: Signature,
        #[serde(borrow)]
        transactions: Vec<&'a RawValue>,
        last_hash: String,
    }
    #[derive(Deserialize, Debug)]
    #[serde(rename_all = "camelCase")]
    struct Root<'a> {
        #[serde(borrow)]
        example_base: HashMap<String, TestSession<'a>>,
        #[serde(rename = "signerID")]
        signer_id: SignerID,
    }
    let data = fs::read_to_string("data/singleTxSession.json")
        .expect("Unable to read singleTxSession.json");
    let root: Root = serde_json::from_str(&data).unwrap();
    let (session_id_str, example) = root.example_base.into_iter().next().unwrap();
    let session_id = SessionID(session_id_str.clone());
    // The CoID is the part of the session key before "_session_".
    let co_id = CoID(
        session_id_str
            .split("_session_")
            .next()
            .unwrap()
            .to_string(),
    );
    // Fix: `new` takes Option<SignerID> — wrap the fixture's signer ID.
    let mut session = SessionLogInternal::new(co_id, session_id, Some(root.signer_id));
    let new_signature = example.last_signature;
    let result = session.try_add(
        vec![example.transactions[0].to_owned()],
        &new_signature,
        false,
    );
    match result {
        // Fix: try_add yields Result<(), _> — the old `Ok(returned_final_hash)`
        // bound `()` under a misleading name and triggered an unused warning.
        Ok(()) => {
            let final_hash = session.hasher.finalize();
            let final_hash_encoded = format!(
                "hash_z{}",
                bs58::encode(final_hash.as_bytes()).into_string()
            );
            assert_eq!(final_hash_encoded, example.last_hash);
            assert_eq!(session.last_signature, Some(new_signature));
        }
        Err(CoJsonCoreError::SignatureVerification(new_hash_encoded)) => {
            assert_eq!(new_hash_encoded, example.last_hash);
            panic!("Signature verification failed despite same hash");
        }
        Err(e) => {
            panic!("Unexpected error: {:?}", e);
        }
    }
}
#[test]
fn test_add_from_example_json_multi_tx() {
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct TestSession<'a> {
last_signature: Signature,
#[serde(borrow)]
transactions: Vec<&'a RawValue>,
last_hash: String,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Root<'a> {
#[serde(borrow)]
example_base: HashMap<String, TestSession<'a>>,
#[serde(rename = "signerID")]
signer_id: SignerID,
}
let data = fs::read_to_string("data/multiTxSession.json")
.expect("Unable to read multiTxSession.json");
let root: Root = serde_json::from_str(&data).unwrap();
let (session_id_str, example) = root.example_base.into_iter().next().unwrap();
let session_id = SessionID(session_id_str.clone());
let co_id = CoID(
session_id_str
.split("_session_")
.next()
.unwrap()
.to_string(),
);
let mut session = SessionLogInternal::new(co_id, session_id, root.signer_id);
let new_signature = example.last_signature;
let result = session.try_add(
example.transactions.into_iter().map(|tx| tx.to_owned()).collect(),
&new_signature,
false,
);
match result {
Ok(returned_final_hash) => {
let final_hash = session.hasher.finalize();
let final_hash_encoded = format!(
"hash_z{}",
bs58::encode(final_hash.as_bytes()).into_string()
);
assert_eq!(final_hash_encoded, example.last_hash);
assert_eq!(session.last_signature, Some(new_signature));
}
Err(CoJsonCoreError::SignatureVerification(new_hash_encoded)) => {
assert_eq!(new_hash_encoded, example.last_hash);
panic!("Signature verification failed despite same hash");
}
Err(e) => {
panic!("Unexpected error: {:?}", e);
}
}
}
#[test]
fn test_add_new_transaction() {
// Load the example data to get all the pieces we need
let data = fs::read_to_string("data/singleTxSession.json")
.expect("Unable to read singleTxSession.json");
let root: serde_json::Value = serde_json::from_str(&data).unwrap();
let session_data =
&root["exampleBase"]["co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR"];
let tx_from_example = &session_data["transactions"][0];
let known_key = &root["knownKeys"][0];
// Since we don't have the original private key, we generate a new one for this test.
let mut csprng = OsRng;
let signing_key = SigningKey::generate(&mut csprng);
let public_key = signing_key.verifying_key();
// Initialize an empty session
let mut session = SessionLogInternal::new(
CoID(root["coID"].as_str().unwrap().to_string()),
SessionID("co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR".to_string()),
public_key.into(),
);
// The plaintext changes we want to add
let changes_json =
r#"[{"after":"start","op":"app","value":"co_zMphsnYN6GU8nn2HDY5suvyGufY"}]"#;
// Extract all the necessary components from the example data
let key_secret = KeySecret(known_key["secret"].as_str().unwrap().to_string());
let key_id = KeyID(known_key["id"].as_str().unwrap().to_string());
let made_at = tx_from_example["madeAt"].as_u64().unwrap();
// Call the function we are testing
let (new_signature, _new_tx) = session.add_new_transaction(
changes_json,
TransactionMode::Private {
key_id: key_id,
key_secret: key_secret,
},
&signing_key.into(),
made_at,
);
// 1. Check that the transaction we created matches the one in the file
let created_tx_json = &session.transactions_json[0];
let expected_tx_json = serde_json::to_string(tx_from_example).unwrap();
assert_eq!(created_tx_json, &expected_tx_json);
// 2. Check that the final hash of the session matches the one in the file
let final_hash = session.hasher.finalize();
let final_hash_encoded = format!(
"hash_z{}",
bs58::encode(final_hash.as_bytes()).into_string()
);
assert_eq!(
final_hash_encoded,
session_data["lastHash"].as_str().unwrap()
);
let final_hash_encoded_stringified = format!(
"\"{}\"",
final_hash_encoded
);
// 3. Check that the signature is valid for our generated key
assert!(session
.public_key
.verify(final_hash_encoded_stringified.as_bytes(), &(&new_signature).into())
.is_ok());
assert_eq!(session.last_signature, Some(new_signature));
}
#[test]
fn test_decrypt_from_example_json() {
#[derive(Deserialize, Debug)]
struct KnownKey {
secret: String,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(bound(deserialize = "'de: 'a"))]
struct TestSession<'a> {
last_signature: String,
#[serde(borrow)]
transactions: Vec<&'a RawValue>,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(bound(deserialize = "'de: 'a"))]
struct Root<'a> {
#[serde(borrow)]
example_base: HashMap<String, TestSession<'a>>,
#[serde(rename = "signerID")]
signer_id: SignerID,
known_keys: Vec<KnownKey>,
#[serde(rename = "coID")]
co_id: CoID,
}
let data = fs::read_to_string("data/singleTxSession.json")
.expect("Unable to read singleTxSession.json");
let root: Root = serde_json::from_str(&data).unwrap();
let (session_id_str, example) = root.example_base.into_iter().next().unwrap();
let session_id = SessionID(session_id_str.clone());
let public_key =
VerifyingKey::from_bytes(&decode_z(&root.signer_id.0).unwrap().try_into().unwrap())
.unwrap();
let mut session = SessionLogInternal::new(root.co_id, session_id, public_key.into());
let new_signature = Signature(example.last_signature);
session
.try_add(
example
.transactions
.into_iter()
.map(|v| v.to_owned())
.collect(),
&new_signature,
true, // Skipping verification because we don't have the right initial state
)
.unwrap();
let key_secret = KeySecret(root.known_keys[0].secret.clone());
let decrypted = session
.decrypt_next_transaction_changes_json(0, key_secret)
.unwrap();
assert_eq!(
decrypted,
r#"[{"after":"start","op":"app","value":"co_zMphsnYN6GU8nn2HDY5suvyGufY"}]"#
);
}
}

View File

@@ -1,15 +0,0 @@
[package]
name = "lzy"
version = "0.1.0"
edition = "2021"
[dependencies]
[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
[[bench]]
name = "compression_benchmark"
harness = false

View File

@@ -1,36 +0,0 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};
use lzy::{compress, decompress};
use std::fs;
use std::time::Duration;
fn compression_benchmark(c: &mut Criterion) {
let data = fs::read("data/compression_66k_JSON.txt").expect("Failed to read benchmark data");
let mut group = c.benchmark_group("LZY Compression");
group.measurement_time(Duration::from_secs(10));
group.sample_size(10);
group.throughput(Throughput::Bytes(data.len() as u64));
let compressed = compress(&data);
let compression_ratio = compressed.len() as f64 / data.len() as f64;
println!(
"Compression ratio (compressed/original): {:.4} ({} / {} bytes)",
compression_ratio,
compressed.len(),
data.len()
);
group.bench_function("compress", |b| {
b.iter(|| compress(black_box(&data)))
});
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed);
group.bench_function("decompress", |b| {
b.iter(|| decompress(black_box(&compressed)))
});
}
criterion_group!(benches, compression_benchmark);
criterion_main!(benches);

File diff suppressed because one or more lines are too long

View File

@@ -1,348 +0,0 @@
// Format parameters: match lengths live in a 4-bit token nibble (stored
// value = length - 3), literal runs carry at most 15 bytes per token, and
// the hash table maps 4-byte prefixes into 2^16 slots.
const MIN_MATCH_LEN: usize = 4;
const MAX_MATCH_LEN: usize = 15 + 3;
const MAX_LITERALS: usize = 15;
const HASH_LOG: u32 = 16;
const HASH_TABLE_SIZE: usize = 1 << HASH_LOG;

/// Hash exactly 4 bytes into a table index using Knuth's multiplicative
/// method. Panics if `data` is not exactly 4 bytes long.
fn hash(data: &[u8]) -> usize {
    const KNUTH_MULT_PRIME: u32 = 2654435761;
    let word = u32::from_le_bytes(data.try_into().unwrap());
    // Keep the top HASH_LOG bits of the product as the slot index.
    (word.wrapping_mul(KNUTH_MULT_PRIME) >> (32 - HASH_LOG)) as usize
}
/// Errors `decompress` reports for malformed input.
#[derive(Debug, PartialEq)]
pub enum DecompressionError {
    InvalidToken,
    UnexpectedEof,
}

/// Decompress an LZY byte stream.
///
/// Each sequence is a token byte (high nibble = literal count, low nibble =
/// match length - 3, with 0 meaning "no match"), followed by that many
/// literal bytes and, when a match is present, a little-endian u16
/// back-reference offset.
pub fn decompress(input: &[u8]) -> Result<Vec<u8>, DecompressionError> {
    let mut out = Vec::with_capacity(input.len() * 2);
    let mut pos = 0;
    while pos < input.len() {
        let token = input[pos];
        pos += 1;
        let literal_len = usize::from(token >> 4);
        let match_token = usize::from(token & 0x0F);

        // Copy the literal run verbatim.
        let literal_end = pos + literal_len;
        if literal_end > input.len() {
            return Err(DecompressionError::UnexpectedEof);
        }
        out.extend_from_slice(&input[pos..literal_end]);
        pos = literal_end;

        // A non-zero low nibble means a 2-byte offset follows.
        if match_token != 0 {
            if pos + 2 > input.len() {
                return Err(DecompressionError::UnexpectedEof);
            }
            let offset = usize::from(u16::from_le_bytes([input[pos], input[pos + 1]]));
            pos += 2;
            if offset == 0 || offset > out.len() {
                return Err(DecompressionError::InvalidToken);
            }
            // Byte-by-byte copy so overlapping matches (offset < length)
            // replicate freshly written output, LZ77-style.
            let start = out.len() - offset;
            for k in 0..match_token + 3 {
                let byte = out[start + k];
                out.push(byte);
            }
        }
    }
    Ok(out)
}
/// One-shot convenience wrapper: compress `input` with a fresh
/// `Compressor` (no shared history across calls).
pub fn compress(input: &[u8]) -> Vec<u8> {
    Compressor::new().compress_chunk(input)
}
/// Append one encoded sequence to `out`: literal runs longer than
/// `MAX_LITERALS` are flushed first as match-free tokens, then a final
/// token carries the remaining literals plus the optional match info.
fn emit_sequence(out: &mut Vec<u8>, mut literals: &[u8], match_len: usize, offset: u16) {
    // Flush full literal-only chunks; their match nibble stays zero.
    while literals.len() > MAX_LITERALS {
        out.push((MAX_LITERALS as u8) << 4);
        let (chunk, rest) = literals.split_at(MAX_LITERALS);
        out.extend_from_slice(chunk);
        literals = rest;
    }
    // Match lengths are stored biased by 3 (0 = no match).
    let match_nibble = match match_len {
        0 => 0u8,
        n => (n - 3) as u8,
    };
    out.push((literals.len() as u8) << 4 | match_nibble);
    out.extend_from_slice(literals);
    if match_len > 0 {
        out.extend_from_slice(&offset.to_le_bytes());
    }
}
/// Streaming LZY compressor. All previously compressed bytes are kept in
/// `history` so later chunks can back-reference earlier ones; `hash_table`
/// maps 4-byte prefixes to their most recent position in the history.
pub struct Compressor {
    hash_table: Vec<u32>,
    history: Vec<u8>,
}

impl Compressor {
    /// Create a compressor with empty history and a zeroed hash table.
    pub fn new() -> Self {
        Compressor {
            hash_table: vec![0; HASH_TABLE_SIZE],
            history: Vec::new(),
        }
    }

    /// Compress `chunk` and return its encoded bytes. The chunk is first
    /// appended to the internal history, so matches may reach back into
    /// data from earlier calls.
    pub fn compress_chunk(&mut self, chunk: &[u8]) -> Vec<u8> {
        let mut encoded = Vec::new();
        let start = self.history.len();
        self.history.extend_from_slice(chunk);
        let end = self.history.len();

        let mut pos = start;
        let mut anchor = start; // first literal byte not yet emitted
        while pos < end {
            let mut found: Option<(u16, usize)> = None;
            if end - pos >= MIN_MATCH_LEN {
                let slot = hash(&self.history[pos..pos + 4]);
                let candidate = self.hash_table[slot] as usize;
                // Candidate must lie behind us and within u16 offset range.
                if candidate < pos && pos - candidate < u16::MAX as usize {
                    if self.history.get(candidate..candidate + MIN_MATCH_LEN)
                        == Some(&self.history[pos..pos + MIN_MATCH_LEN])
                    {
                        // Extend as far as the data and the 4-bit length
                        // encoding allow.
                        let mut len = MIN_MATCH_LEN;
                        while pos + len < end
                            && len < MAX_MATCH_LEN
                            && self.history.get(candidate + len) == self.history.get(pos + len)
                        {
                            len += 1;
                        }
                        found = Some(((pos - candidate) as u16, len));
                    }
                }
                // Record our position for this prefix AFTER the lookup so
                // a position never matches against itself.
                self.hash_table[slot] = pos as u32;
            }
            match found {
                Some((offset, len)) => {
                    emit_sequence(&mut encoded, &self.history[anchor..pos], len, offset);
                    pos += len;
                    anchor = pos;
                }
                None => pos += 1,
            }
        }
        // Flush trailing literals that had no match to attach to.
        if anchor < pos {
            emit_sequence(&mut encoded, &self.history[anchor..pos], 0, 0);
        }
        encoded
    }
}
#[cfg(test)]
mod tests {
use super::*;
// Round-trip: compressed output must decompress back to the input.
#[test]
fn test_simple_roundtrip() {
let data = b"hello world, hello people";
let compressed = compress(data);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data).unwrap(), compressed);
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
// Input with no 4-byte repeats: everything is emitted as literals.
#[test]
fn test_long_literals() {
let data = b"abcdefghijklmnopqrstuvwxyz";
let compressed = compress(data);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data).unwrap(), compressed);
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
// Empty input compresses to an empty stream and back.
#[test]
fn test_decompress_empty() {
let data = b"";
let compressed = compress(data);
assert!(compressed.is_empty());
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
// Matches whose offset is smaller than their length must replicate
// already-emitted output correctly (LZ77-style overlap).
#[test]
fn test_overlapping_match() {
let data = b"abcdeabcdeabcdeabcde"; // repeating sequence
let compressed = compress(data);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data).unwrap(), compressed);
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
let data2 = b"abababababababababab";
let compressed2 = compress(data2);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data2).unwrap(), compressed2);
let decompressed2 = decompress(&compressed2).unwrap();
assert_eq!(data2, decompressed2.as_slice());
}
// Larger real-world fixture; also writes the compressed artifact to disk.
#[test]
fn test_json_roundtrip() {
let data = std::fs::read("data/compression_66k_JSON.txt").unwrap();
let compressed = compress(&data);
std::fs::write("compressed_66k.lzy", &compressed).unwrap();
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
// Helpers that simulate CRDT-style append transactions, used by the
// compression-ratio experiments below.
mod crdt_helpers {
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct After {
pub session_id: String,
pub tx_index: u32,
pub change_idx: u32,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Transaction {
pub op: String,
pub value: String,
pub after: After,
}
// One full JSON transaction per character of `text`.
pub fn generate_transactions(text: &str, session_id: &str) -> Vec<String> {
let mut transactions = Vec::new();
for (i, c) in text.chars().enumerate() {
let tx = Transaction {
op: "app".to_string(),
value: c.to_string(),
after: After {
session_id: session_id.to_string(),
tx_index: i as u32,
change_idx: 0,
},
};
transactions.push(serde_json::to_string(&tx).unwrap());
}
transactions
}
// Minimal variant: each transaction is just the JSON-quoted character.
pub fn generate_shorthand_transactions(text: &str) -> Vec<String> {
let mut transactions = Vec::new();
for c in text.chars() {
transactions.push(serde_json::to_string(&c.to_string()).unwrap());
}
transactions
}
}
#[test]
fn test_crdt_transaction_generation() {
let sample_text = "This is a sample text for our CRDT simulation. \
It should be long enough to see some interesting compression results later on. \
Let's add another sentence to make it a bit more substantial.";
let session_id = "co_zRtnoNffeMHge9wvyL5mK1RWbdz_session_zKvAVFSV5cqW";
let transactions = crdt_helpers::generate_transactions(sample_text, session_id);
println!("--- Generated CRDT Transactions ---");
for tx in &transactions {
println!("{}", tx);
}
println!("--- End of CRDT Transactions ---");
assert!(!transactions.is_empty());
assert_eq!(transactions.len(), sample_text.chars().count());
}
// Compresses each transaction as its own chunk (shared history) and
// prints size/ratio statistics; correctness is checked via round-trip.
#[test]
fn test_crdt_chunked_compression() {
let sample_text = "This is a sample text for our CRDT simulation. \
It should be long enough to see some interesting compression results later on. \
Let's add another sentence to make it a bit more substantial.";
let session_id = "co_zRtnoNffeMHge9wvyL5mK1RWbdz_session_zKvAVFSV5cqW";
let transactions_json = crdt_helpers::generate_transactions(sample_text, session_id);
let mut compressor = Compressor::new();
let mut compressed_log = Vec::new();
let mut total_json_len = 0;
for tx_json in &transactions_json {
let compressed_chunk = compressor.compress_chunk(tx_json.as_bytes());
compressed_log.extend_from_slice(&compressed_chunk);
total_json_len += tx_json.len();
}
let decompressed = decompress(&compressed_log).unwrap();
// Verify roundtrip
let original_log_concatenated = transactions_json.join("");
assert_eq!(decompressed, original_log_concatenated.as_bytes());
let plaintext_len = sample_text.len();
let compressed_len = compressed_log.len();
let compression_ratio = compressed_len as f64 / total_json_len as f64;
let overhead_ratio = compressed_len as f64 / plaintext_len as f64;
println!("\n--- CRDT Chunked Compression Test ---");
println!("Plaintext size: {} bytes", plaintext_len);
println!("Total JSON size: {} bytes", total_json_len);
println!("Compressed log size: {} bytes", compressed_len);
println!("Compression ratio (compressed/json): {:.4}", compression_ratio);
println!("Overhead ratio (compressed/plaintext): {:.4}", overhead_ratio);
println!("--- End of Test ---");
}
// Same experiment with the shorthand (single-character) transactions.
#[test]
fn test_crdt_shorthand_compression() {
let sample_text = "This is a sample text for our CRDT simulation. \
It should be long enough to see some interesting compression results later on. \
Let's add another sentence to make it a bit more substantial.";
let transactions_json = crdt_helpers::generate_shorthand_transactions(sample_text);
let mut compressor = Compressor::new();
let mut compressed_log = Vec::new();
let mut total_json_len = 0;
for tx_json in &transactions_json {
let compressed_chunk = compressor.compress_chunk(tx_json.as_bytes());
compressed_log.extend_from_slice(&compressed_chunk);
total_json_len += tx_json.len();
}
let decompressed = decompress(&compressed_log).unwrap();
// Verify roundtrip
let original_log_concatenated = transactions_json.join("");
assert_eq!(decompressed, original_log_concatenated.as_bytes());
let plaintext_len = sample_text.len();
let compressed_len = compressed_log.len();
let compression_ratio = compressed_len as f64 / total_json_len as f64;
let overhead_ratio = compressed_len as f64 / plaintext_len as f64;
println!("\n--- CRDT Shorthand Compression Test ---");
println!("Plaintext size: {} bytes", plaintext_len);
println!("Total JSON size: {} bytes", total_json_len);
println!("Compressed log size: {} bytes", compressed_len);
println!("Compression ratio (compressed/json): {:.4}", compression_ratio);
println!("Overhead ratio (compressed/plaintext): {:.4}", overhead_ratio);
println!("--- End of Test ---");
}
}

View File

@@ -5,7 +5,7 @@
<link rel="icon" type="image/png" href="/jazz-logo.png" />
<link rel="stylesheet" href="/src/index.css" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Jazz CoValues tests</title>
<title>Jazz Chat Example</title>
</head>
<body>
<div id="root"></div>

View File

@@ -0,0 +1,34 @@
{
"name": "@jazz-e2e/binarycostream",
"private": true,
"version": "0.0.81",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"preview": "vite preview",
"test": "playwright test",
"test:ui": "playwright test --ui"
},
"lint-staged": {
"*.{js,jsx,mdx,json}": "prettier --write"
},
"dependencies": {
"cojson": "workspace:*",
"hash-slash": "workspace:*",
"is-ci": "^3.0.1",
"jazz-react": "workspace:*",
"jazz-tools": "workspace:*",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@playwright/test": "^1.46.1",
"@types/node": "^22.5.1",
"@types/react": "^18.2.19",
"@types/react-dom": "^18.2.7",
"@vitejs/plugin-react-swc": "^3.3.2",
"typescript": "^5.3.3",
"vite": "^5.0.10"
}
}

View File

@@ -0,0 +1,49 @@
import { defineConfig, devices } from "@playwright/test";
import isCI from "is-ci";
/**
* Read environment variables from file.
* https://github.com/motdotla/dotenv
*/
// import dotenv from 'dotenv';
// dotenv.config({ path: path.resolve(__dirname, '.env') });
/**
* See https://playwright.dev/docs/test-configuration.
*/
export default defineConfig({
testDir: "./tests",
/* Run tests in files in parallel */
fullyParallel: true,
/* Fail the build on CI if you accidentally left test.only in the source code. */
forbidOnly: isCI,
/* Retry on CI only */
retries: isCI ? 2 : 0,
/* Opt out of parallel tests on CI. */
workers: isCI ? 1 : undefined,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: "html",
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: {
/* Base URL to use in actions like `await page.goto('/')`.
   CI hits the preview build on :4173; local runs assume `pnpm dev` on :5173. */
baseURL: isCI ? "http://localhost:4173/" : "http://localhost:5173",
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
trace: "on-first-retry",
},
/* Configure projects for major browsers */
projects: [
{
name: "chromium",
use: { ...devices["Desktop Chrome"] },
},
],
/* Run your local dev server before starting the tests.
   Only on CI — locally the dev server must already be running. */
webServer: isCI ? {
command: "pnpm preview",
url: "http://localhost:4173/",
} : undefined,
});

View File

@@ -0,0 +1,40 @@
import { Account, BinaryCoStream, ID } from "jazz-tools";
import { useEffect } from "react";
import { useAccount, useCoState } from "./jazz";
import { UploadedFile } from "./schema";
import { waitForCoValue } from "./lib/waitForCoValue";
// Load the UploadedFile CoMap, then force its referenced binary stream to
// be fully downloaded before returning the CoMap. The loadAsBlob result is
// intentionally discarded — only the completed sync matters here.
async function getUploadedFile(
me: Account,
uploadedFileId: ID<UploadedFile>) {
// `Boolean` as predicate: resolve as soon as the CoMap itself is loaded.
const uploadedFile = await waitForCoValue(UploadedFile, uploadedFileId, me, Boolean, {})
await BinaryCoStream.loadAsBlob(uploadedFile._refs.file.id, me);
return uploadedFile;
}
// Downloader side of the e2e test: loads the UploadedFile created by the
// uploader, waits for the binary stream to finish syncing, then flips
// syncCompleted so the uploader can detect the completed round-trip.
export function DownloaderPeer(props: { testCoMapId: ID<UploadedFile> }) {
const account = useAccount();
const testCoMap = useCoState(UploadedFile, props.testCoMapId, {});
useEffect(() => {
// Runs once per mount; testCoMapId never changes for this page.
// NOTE(review): under React 18 StrictMode (dev) this effect fires twice,
// so syncCompleted may be set twice — appears harmless, worth confirming.
getUploadedFile(account.me, props.testCoMapId).then(value => {
value.syncCompleted = true;
});
}, []);
return (
<>
<h1>Downloader Peer</h1>
<div>Fetching: {props.testCoMapId}</div>
{/* NOTE(review): two sibling elements share data-testid="result";
getByTestId('result') selectors will match both — consider distinct ids. */}
<div data-testid="result">
Covalue: {Boolean(testCoMap?.id) ? "Downloaded" : "Not Downloaded"}
</div>
<div data-testid="result">
File:{" "}
{Boolean(testCoMap?.syncCompleted) ? "Downloaded" : "Not Downloaded"}
</div>
</>
);
}

View File

@@ -1,29 +1,24 @@
import { coValueClassFromCoValueClassOrSchema } from "jazz-tools";
import { useAccount, useCoState } from "jazz-tools/react";
import { ID } from "jazz-tools";
import { useEffect, useState } from "react";
import { createCredentiallessIframe } from "../../lib/createCredentiallessIframe";
import { waitForCoValue } from "../../lib/waitForCoValue";
import { BytesRadioGroup } from "./lib/BytesRadioGroup";
import { useAccount } from "./jazz";
import { createCredentiallessIframe } from "./lib/createCredentiallessIframe";
import { generateTestFile } from "./lib/generateTestFile";
import { getDownloaderPeerUrl } from "./lib/getDownloaderPeerUrl";
import { getDefaultFileSize, getIsAutoUpload } from "./lib/searchParams";
import { UploadedFile } from "./schema";
import { waitForCoValue } from "./lib/waitForCoValue";
import { getDefaultFileSize, getIsAutoUpload } from "./lib/searchParams";
import { BytesRadioGroup } from "./lib/BytesRadioGroup";
export function UploaderPeer() {
const account = useAccount();
const [uploadedFileId, setUploadedFileId] = useState<string | undefined>(
undefined,
);
const [uploadedFileId, setUploadedFileId] = useState<
ID<UploadedFile> | undefined
>(undefined);
const [syncDuration, setSyncDuration] = useState<number | null>(null);
const [bytes, setBytes] = useState(getDefaultFileSize);
const [synced, setSynced] = useState(false);
const testFile = useCoState(UploadedFile, uploadedFileId, {});
async function uploadTestFile() {
if (!account.me) return;
setUploadedFileId(undefined);
setSynced(false);
if (!account) return;
// Mark the sync start
performance.mark("sync-start");
@@ -34,20 +29,16 @@ export function UploaderPeer() {
const iframe = createCredentiallessIframe(getDownloaderPeerUrl(file));
document.body.appendChild(iframe);
setSyncDuration(null);
setUploadedFileId(file.id);
account.me.waitForAllCoValuesSync().then(() => {
setSynced(true);
});
// The downloader peer will set the syncCompleted to true when the download is complete.
// We use this to measure the sync duration.
await waitForCoValue(
coValueClassFromCoValueClassOrSchema(UploadedFile),
UploadedFile,
file.id,
account.me,
(value) => value.syncCompleted,
{ loadAs: account.me },
{}
);
iframe.remove();
@@ -69,28 +60,19 @@ export function UploaderPeer() {
<BytesRadioGroup selectedValue={bytes} onChange={setBytes} />
<button onClick={uploadTestFile}>Upload Test File</button>
{uploadedFileId && (
<>
<div>{uploadedFileId}</div>
<div data-testid="synced-with-server">
Synced with the server: {String(synced)}
</div>
</>
)}
{uploadedFileId && <div>{uploadedFileId}</div>}
{syncDuration && (
<div data-testid="sync-duration">
Two way sync duration: {syncDuration.toFixed(2)}ms
Sync Duration: {syncDuration.toFixed(2)}ms
</div>
)}
{uploadedFileId && (
<div data-testid="result">
Two way sync completed: {String(Boolean(syncDuration))}
Sync Completed: {String(Boolean(syncDuration))}
</div>
)}
{testFile?.coMapDownloaded && (
<div data-testid="co-map-downloaded">CoMap synced!</div>
)}
</>
);
}

View File

@@ -0,0 +1,24 @@
import React from "react";
import ReactDOM from "react-dom/client";
import { DownloaderPeer } from "./DownloaderPeer";
import { Jazz } from "./jazz";
import { UploaderPeer } from "./UploaderPeer";
import { getValueId } from "./lib/searchParams";
function Main() {
const valueId = getValueId();
if (valueId) {
return <DownloaderPeer testCoMapId={valueId} />;
}
return <UploaderPeer />;
}
ReactDOM.createRoot(document.getElementById("root")!).render(
<React.StrictMode>
<Jazz.Provider>
<Main />
</Jazz.Provider>
</React.StrictMode>,
);

View File

@@ -0,0 +1,38 @@
import { createJazzReactContext, DemoAuth } from "jazz-react";
import { useEffect } from "react";
import { getValueId } from "./lib/searchParams";
// Headless DemoAuth UI for e2e runs: as soon as the auth state is ready,
// automatically sign up a fixed test user — no interaction required.
function AutoLoginComponent(props: {
  appName: string;
  loading: boolean;
  existingUsers: string[];
  logInAs: (existingUser: string) => void;
  signUp: (username: string) => void;
}) {
  useEffect(() => {
    if (!props.loading) {
      props.signUp("Test User");
    }
  }, [props.loading]);

  return <div>Signing up...</div>;
}
// Uploader and downloader register under different app keys so each side
// gets its own local account and must actually sync through the server.
const key = getValueId()
? `downloader-e2e@jazz.tools`
: `uploader-e2e@jazz.tools`;
// ?localSync points the client at a locally running sync server on :4200
// instead of the hosted mesh.
const localSync = new URLSearchParams(location.search).has("localSync");
const Jazz = createJazzReactContext({
auth: DemoAuth({
appName: "BinaryCoStream Sync",
Component: AutoLoginComponent,
}),
peer: localSync
? `ws://localhost:4200?key=${key}`
: `wss://mesh.jazz.tools/?key=${key}`,
});
export const { useAccount, useCoState } = Jazz;
export { Jazz };

View File

@@ -4,54 +4,46 @@ export function BytesRadioGroup(props: {
}) {
return (
<p>
<BytesRadioInput
label="1KB"
value={1e3}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
<BytesRadioInput
label="10KB"
value={1e4}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
<BytesRadioInput
label="1KB"
value={1e3}
selectedValue={props.selectedValue}
onChange={props.onChange} />
<BytesRadioInput
label="10KB"
value={1e4}
selectedValue={props.selectedValue}
onChange={props.onChange} />
<BytesRadioInput
label="100KB"
value={1e5}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
onChange={props.onChange} />
<BytesRadioInput
label="150KB"
value={1e5 + 5e4}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
onChange={props.onChange} />
<BytesRadioInput
label="200KB"
value={2e6}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
onChange={props.onChange} />
<BytesRadioInput
label="500KB"
value={5e6}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
onChange={props.onChange} />
<BytesRadioInput
label="1MB"
value={1e6}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
onChange={props.onChange} />
<BytesRadioInput
label="10MB"
value={1e7}
selectedValue={props.selectedValue}
onChange={props.onChange}
/>
onChange={props.onChange} />
</p>
);
}
@@ -68,8 +60,7 @@ function BytesRadioInput(props: {
name="bytes"
value={props.value}
checked={props.value === props.selectedValue}
onChange={() => props.onChange(props.value)}
/>
onChange={() => props.onChange(props.value)} />
{props.label}
</label>
);

View File

@@ -15,7 +15,6 @@ export function createCredentiallessIframe(url: string) {
iframe.style.position = "absolute";
iframe.style.top = "0";
iframe.style.right = "0";
iframe.setAttribute("data-testid", "downloader-peer");
return iframe;
}

View File

@@ -0,0 +1,26 @@
import { Account, Group, BinaryCoStream } from "jazz-tools";
import { UploadedFile } from "../schema";
// Create the test payload: an UploadedFile CoMap holding a BinaryCoStream
// of `bytes` repeated "1" characters, owned by a public-writable group.
export async function generateTestFile(me: Account, bytes: number) {
  // "everyone: writer" lets the credentialless downloader iframe flip
  // syncCompleted on this same CoMap.
  const group = Group.create({ owner: me });
  group.addMember("everyone", "writer");
  const ownership = { owner: group };

  const testFile = UploadedFile.create(
    {
      file: await BinaryCoStream.createFromBlob(
        new Blob(['1'.repeat(bytes)], { type: 'image/png' }),
        ownership
      ),
      syncCompleted: false,
    },
    ownership
  );

  // Removed dead code: a URL with ?valueId was built here but never used
  // (getDownloaderPeerUrl performs that job for callers).
  return testFile;
}

View File

@@ -0,0 +1,8 @@
import { UploadedFile } from "src/schema";
// Build the current page URL with ?valueId=<id> set, which makes the app
// render the DownloaderPeer for that file when loaded in the iframe.
export function getDownloaderPeerUrl(value: UploadedFile) {
  const downloaderUrl = new URL(window.location.href);
  downloaderUrl.searchParams.set("valueId", value.id);
  return downloaderUrl.toString();
}

View File

@@ -0,0 +1,14 @@
import { ID } from "jazz-tools";
import { UploadedFile } from "../schema";
// The CoValue id to download; present only on the downloader side.
export function getValueId() {
  const raw = new URLSearchParams(location.search).get("valueId");
  return (raw as ID<UploadedFile> | null) ?? undefined;
}
// When ?auto is present the uploader starts the test without a click.
export function getIsAutoUpload() {
  return new URLSearchParams(location.search).has("auto");
}
// File size in bytes for the generated test file (defaults to 1000).
export function getDefaultFileSize() {
  const raw = new URLSearchParams(location.search).get("fileSize");
  return parseInt(raw ?? (1e3).toString());
}

View File

@@ -0,0 +1,31 @@
import {
Account,
CoValue,
CoValueClass,
DepthsIn,
ID,
subscribeToCoValue,
} from "jazz-tools";
/**
 * Subscribe to a CoValue and resolve once `predicate` accepts a loaded value.
 *
 * Fix: in the original, `unsubscribe` was a `const` assigned from
 * `subscribeToCoValue(...)` and called inside the callback — if the
 * subscription fired synchronously (value already satisfies the predicate),
 * the callback referenced `unsubscribe` while still uninitialized (TDZ
 * ReferenceError). We now guard with a `done` flag and unsubscribe after
 * the subscribe call returns in that case.
 */
export function waitForCoValue<T extends CoValue>(
  coMap: CoValueClass<T>,
  valueId: ID<T>,
  account: Account,
  predicate: (value: T) => boolean,
  depth: DepthsIn<T>
) {
  return new Promise<T>((resolve) => {
    let unsubscribe: (() => void) | undefined;
    let done = false;

    unsubscribe = subscribeToCoValue(
      coMap,
      valueId,
      account,
      depth,
      (value) => {
        if (done || !predicate(value)) return;
        done = true;
        resolve(value);
        // Undefined only when the callback ran synchronously; the
        // post-subscribe check below covers that case.
        unsubscribe?.();
      }
    );

    if (done) {
      unsubscribe();
    }
  });
}

View File

@@ -0,0 +1,6 @@
import { BinaryCoStream, co, CoMap } from "jazz-tools";
// Schema for the e2e payload: a binary file plus a flag the downloader
// flips once it has fully synced the file back.
export class UploadedFile extends CoMap {
// Reference to the uploaded binary stream.
file = co.ref(BinaryCoStream);
// Set to true by the downloader peer after the file finished syncing.
syncCompleted = co.boolean;
}

View File

@@ -0,0 +1,11 @@
import { test, expect } from '@playwright/test';
// Happy-path e2e: trigger an upload, wait for the downloader iframe to
// sync the file back, and assert the round-trip completed.
test('BinaryCoStream - Sync', async ({ page }) => {
await page.goto('/');
await page.getByRole('button', { name: 'Upload Test File' }).click();
// sync-duration only renders after the downloader confirmed completion.
await page.getByTestId('sync-duration').waitFor();
await expect(page.getByTestId('result')).toHaveText('Sync Completed: true');
});

View File

@@ -21,5 +21,5 @@
"noFallthroughCasesInSwitch": true,
"baseUrl": "."
},
"include": ["src"]
"include": ["src"],
}

View File

@@ -0,0 +1,10 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react-swc'
// https://vitejs.dev/config/
export default defineConfig({
plugins: [react()],
build: {
// Unminified output keeps stack traces readable when e2e runs fail.
minify: false
}
})

View File

@@ -1 +0,0 @@
BETTER_AUTH_SECRET="TEST_SECRET"

View File

@@ -1,49 +0,0 @@
/test-results/
/playwright-report/
/blob-report/
/playwright/.cache/
sqlite.db
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# env files (can opt-in for committing if needed)
.env
.env.*
!.env.example
!.env.test
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts

View File

@@ -1,298 +0,0 @@
# betterauth
## 0.1.25
### Patch Changes
- Updated dependencies [048ac1d]
- jazz-tools@0.14.22
- jazz-betterauth-server-plugin@0.14.22
- jazz-inspector@0.14.22
- jazz-react@0.14.22
- jazz-react-auth-betterauth@0.14.22
- jazz-betterauth-client-plugin@0.14.22
## 0.1.24
### Patch Changes
- Updated dependencies [e7e505e]
- Updated dependencies [13b57aa]
- Updated dependencies [5662faa]
- Updated dependencies [2116a59]
- jazz-tools@0.14.21
- jazz-betterauth-server-plugin@0.14.21
- jazz-inspector@0.14.21
- jazz-react@0.14.21
- jazz-react-auth-betterauth@0.14.21
- jazz-betterauth-client-plugin@0.14.21
## 0.1.23
### Patch Changes
- Updated dependencies [6f72419]
- Updated dependencies [04b20c2]
- jazz-tools@0.14.20
- jazz-betterauth-server-plugin@0.14.20
- jazz-inspector@0.14.20
- jazz-react@0.14.20
- jazz-react-auth-betterauth@0.14.20
- jazz-betterauth-client-plugin@0.14.20
## 0.1.22
### Patch Changes
- jazz-betterauth-client-plugin@0.14.19
- jazz-betterauth-server-plugin@0.14.19
- jazz-react-auth-betterauth@0.14.19
- jazz-inspector@0.14.19
- jazz-react@0.14.19
- jazz-tools@0.14.19
## 0.1.21
### Patch Changes
- Updated dependencies [4b950bc]
- Updated dependencies [d6d9c0a]
- Updated dependencies [c559054]
- jazz-tools@0.14.18
- jazz-betterauth-server-plugin@0.14.18
- jazz-inspector@0.14.18
- jazz-react@0.14.18
- jazz-react-auth-betterauth@0.14.18
- jazz-betterauth-client-plugin@0.14.18
## 0.1.20
### Patch Changes
- Updated dependencies [e512df4]
- jazz-betterauth-server-plugin@0.14.17
- jazz-tools@0.14.17
- jazz-betterauth-client-plugin@0.14.17
- jazz-inspector@0.14.17
- jazz-react@0.14.17
- jazz-react-auth-betterauth@0.14.17
## 0.1.19
### Patch Changes
- jazz-betterauth-server-plugin@0.14.16
- jazz-inspector@0.14.16
- jazz-react@0.14.16
- jazz-react-auth-betterauth@0.14.16
- jazz-tools@0.14.16
- jazz-betterauth-client-plugin@0.14.16
## 0.1.18
### Patch Changes
- Updated dependencies [f9590f9]
- jazz-react@0.14.15
- jazz-betterauth-server-plugin@0.14.15
- jazz-inspector@0.14.15
- jazz-react-auth-betterauth@0.14.15
- jazz-tools@0.14.15
- jazz-betterauth-client-plugin@0.14.15
## 0.1.17
### Patch Changes
- Updated dependencies [e32a1f7]
- jazz-tools@0.14.14
- jazz-betterauth-server-plugin@0.14.14
- jazz-inspector@0.14.14
- jazz-react@0.14.14
- jazz-react-auth-betterauth@0.14.14
- jazz-betterauth-client-plugin@0.14.14
## 0.1.16
### Patch Changes
- jazz-inspector@0.14.13
- jazz-react@0.14.13
- jazz-react-auth-betterauth@0.14.13
## 0.1.15
### Patch Changes
- jazz-inspector@0.14.12
- jazz-react@0.14.12
- jazz-react-auth-betterauth@0.14.12
## 0.1.14
### Patch Changes
- Updated dependencies [dc746a2]
- Updated dependencies [f869d9a]
- Updated dependencies [3fe6832]
- jazz-react-auth-betterauth@0.14.10
- jazz-inspector@0.14.10
- jazz-react@0.14.10
- jazz-tools@0.14.10
- jazz-betterauth-server-plugin@0.14.10
- jazz-betterauth-client-plugin@0.14.10
## 0.1.13
### Patch Changes
- Updated dependencies [22c2600]
- jazz-tools@0.14.9
- jazz-betterauth-server-plugin@0.14.9
- jazz-inspector@0.14.9
- jazz-react@0.14.9
- jazz-react-auth-betterauth@0.14.9
- jazz-betterauth-client-plugin@0.14.9
## 0.1.12
### Patch Changes
- Updated dependencies [637ae13]
- jazz-tools@0.14.8
- jazz-betterauth-server-plugin@0.14.8
- jazz-inspector@0.14.8
- jazz-react@0.14.8
- jazz-react-auth-betterauth@0.14.8
- jazz-betterauth-client-plugin@0.14.8
## 0.1.11
### Patch Changes
- Updated dependencies [365b0ea]
- jazz-tools@0.14.7
- jazz-betterauth-server-plugin@0.14.7
- jazz-inspector@0.14.7
- jazz-react@0.14.7
- jazz-react-auth-betterauth@0.14.7
- jazz-betterauth-client-plugin@0.14.7
## 0.1.10
### Patch Changes
- Updated dependencies [9d6d9fe]
- Updated dependencies [9d6d9fe]
- jazz-tools@0.14.6
- jazz-betterauth-server-plugin@0.14.6
- jazz-inspector@0.14.6
- jazz-react@0.14.6
- jazz-react-auth-betterauth@0.14.6
- jazz-betterauth-client-plugin@0.14.6
## 0.1.9
### Patch Changes
- Updated dependencies [91cbb2f]
- Updated dependencies [20b3d88]
- jazz-tools@0.14.5
- jazz-betterauth-server-plugin@0.14.5
- jazz-inspector@0.14.5
- jazz-react@0.14.5
- jazz-react-auth-betterauth@0.14.5
- jazz-betterauth-client-plugin@0.14.5
## 0.1.8
### Patch Changes
- Updated dependencies [011af55]
- jazz-tools@0.14.4
- jazz-betterauth-server-plugin@0.14.4
- jazz-inspector@0.14.4
- jazz-react@0.14.4
- jazz-react-auth-betterauth@0.14.4
- jazz-betterauth-client-plugin@0.14.4
## 0.1.7
### Patch Changes
- Updated dependencies [3d1027f]
- Updated dependencies [c240eed]
- jazz-tools@0.14.2
- jazz-betterauth-server-plugin@0.14.2
- jazz-inspector@0.14.2
- jazz-react@0.14.2
- jazz-react-auth-betterauth@0.14.2
- jazz-betterauth-client-plugin@0.14.2
## 0.1.6
### Patch Changes
- Updated dependencies [cdfc105]
- jazz-tools@0.14.1
- jazz-betterauth-server-plugin@0.14.1
- jazz-inspector@0.14.1
- jazz-react@0.14.1
- jazz-react-auth-betterauth@0.14.1
- jazz-betterauth-client-plugin@0.14.1
## 0.1.5
### Patch Changes
- Updated dependencies [5835ed1]
- jazz-tools@0.14.0
- jazz-betterauth-server-plugin@0.14.0
- jazz-inspector@0.14.0
- jazz-react@0.14.0
- jazz-react-auth-betterauth@0.14.0
- jazz-betterauth-client-plugin@0.14.0
## 0.1.4
### Patch Changes
- jazz-betterauth-server-plugin@0.13.32
- jazz-react@0.13.32
- jazz-react-auth-betterauth@0.13.32
- jazz-betterauth-client-plugin@0.13.32
## 0.1.3
### Patch Changes
- Updated dependencies [e5b170f]
- jazz-tools@0.13.31
- jazz-betterauth-server-plugin@0.13.31
- jazz-inspector@0.13.31
- jazz-react@0.13.31
- jazz-react-auth-betterauth@0.13.31
- jazz-betterauth-client-plugin@0.13.31
## 0.1.2
### Patch Changes
- jazz-betterauth-server-plugin@0.13.30
- jazz-inspector@0.13.30
- jazz-react@0.13.30
- jazz-react-auth-betterauth@0.13.30
- jazz-tools@0.13.30
- jazz-betterauth-client-plugin@0.13.30
## 0.1.1
### Patch Changes
- Updated dependencies [8e5ff13]
- jazz-inspector@0.13.29
- jazz-betterauth-server-plugin@0.0.1
- jazz-react@0.13.29
- jazz-react-auth-betterauth@0.0.1
- jazz-tools@0.13.29
- jazz-betterauth-client-plugin@0.0.1

View File

@@ -1,82 +0,0 @@
# Better Auth Integration Example
This example demonstrates how to integrate [Better Auth](https://www.better-auth.com/) with Jazz.
## Getting started
To run this example, you may either:
- Clone the Jazz monorepo and run this example from within.
- Create a new Jazz project using this example as a template, and run that new project.
### Setting environment variables
- `NEXT_PUBLIC_AUTH_BASE_URL`: A URL to a Better Auth server. If undefined, the example will self-host a Better Auth server.
- `BETTER_AUTH_SECRET`: The encryption secret used by the self-hosted Better Auth server (required only if `NEXT_PUBLIC_AUTH_BASE_URL` is undefined)
- `GITHUB_CLIENT_ID`: The client ID for the GitHub OAuth provider used by the self-hosted Better Auth server (required only if `NEXT_PUBLIC_AUTH_BASE_URL` is undefined)
- `GITHUB_CLIENT_SECRET`: The client secret for the GitHub OAuth provider used by the self-hosted Better Auth server (required only if `NEXT_PUBLIC_AUTH_BASE_URL` is undefined)
### Using this example as a template
1. Create a new Jazz project, and use this example as a template.
```sh
npx create-jazz-app@latest betterauth-app --example betterauth
```
2. Navigate to the new project and install dependencies.
```sh
cd betterauth-app
pnpm install
```
3. Create a .env file (don't forget to set your [BETTER_AUTH_SECRET](https://www.better-auth.com/docs/installation#set-environment-variables)!)
```sh
mv .env.example .env
```
4. Start the development server
```sh
pnpm dev
```
https://www.better-auth.com/docs/installation#set-environment-variables
### Using the monorepo
This requires `pnpm` to be installed, see [https://pnpm.io/installation](https://pnpm.io/installation).
Clone the jazz repository.
```bash
git clone https://github.com/garden-co/jazz.git
```
Install and build dependencies.
```bash
pnpm i && npx turbo build
```
Go to the example directory.
```bash
cd jazz/examples/betterauth/
```
Create a .env file (don't forget to set your [BETTER_AUTH_SECRET](https://www.better-auth.com/docs/installation#set-environment-variables)!)
```sh
mv .env.example .env
```
Start the dev server.
```bash
pnpm dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

View File

@@ -1,21 +0,0 @@
{
"$schema": "https://ui.shadcn.com/schema.json",
"style": "new-york",
"rsc": false,
"tsx": true,
"tailwind": {
"config": "",
"css": "src/styles/globals.css",
"baseColor": "neutral",
"cssVariables": true,
"prefix": ""
},
"aliases": {
"components": "@/components",
"utils": "@/lib/utils",
"ui": "@/components/ui",
"lib": "@/lib",
"hooks": "@/hooks"
},
"iconLibrary": "lucide"
}

View File

@@ -1,7 +0,0 @@
import type { NextConfig } from "next";
const nextConfig: NextConfig = {
/* config options here */
};
export default nextConfig;

View File

@@ -1,48 +0,0 @@
{
"name": "betterauth",
"private": true,
"type": "module",
"scripts": {
"dev": "next dev --turbopack",
"build": "next build",
"start": "next start",
"lint": "next lint",
"format-and-lint": "biome check .",
"format-and-lint:fix": "biome check . --write",
"test:e2e": "playwright test",
"test:e2e:ui": "playwright test --ui",
"email": "email dev --dir src/components/emails"
},
"dependencies": {
"@icons-pack/react-simple-icons": "^12.8.0",
"@radix-ui/react-label": "^2.1.6",
"@radix-ui/react-slot": "^1.2.2",
"better-auth": "^1.2.4",
"better-sqlite3": "^11.9.1",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"jazz-react-auth-betterauth": "workspace:*",
"jazz-betterauth-client-plugin": "workspace:*",
"jazz-betterauth-server-plugin": "workspace:*",
"jazz-tools": "workspace:*",
"lucide-react": "^0.510.0",
"next": "15.3.2",
"react": "catalog:react",
"react-dom": "catalog:react",
"sonner": "^2.0.3",
"tailwind-merge": "^3.3.0",
"tw-animate-css": "^1.2.5"
},
"devDependencies": {
"@biomejs/biome": "catalog:default",
"@playwright/test": "^1.50.1",
"@tailwindcss/postcss": "^4",
"@types/better-sqlite3": "^7.6.12",
"@types/node": "^20",
"@types/react": "catalog:react",
"@types/react-dom": "catalog:react",
"react-email": "^4.0.11",
"tailwindcss": "^4",
"typescript": "catalog:default"
}
}

View File

@@ -1,53 +0,0 @@
import { defineConfig, devices } from "@playwright/test";
import isCI from "is-ci";
/**
* Read environment variables from file.
* https://github.com/motdotla/dotenv
*/
// import dotenv from 'dotenv';
// dotenv.config({ path: path.resolve(__dirname, '.env') });
/**
* See https://playwright.dev/docs/test-configuration.
*/
export default defineConfig({
testDir: "./tests",
/* Run tests in files in parallel */
fullyParallel: true,
/* Fail the build on CI if you accidentally left test.only in the source code. */
forbidOnly: isCI,
/* Retry on CI only */
retries: isCI ? 2 : 0,
/* Opt out of parallel tests on CI. */
workers: isCI ? 1 : undefined,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: "html",
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: {
/* Base URL to use in actions like `await page.goto('/')`. */
baseURL: "http://localhost:3000/",
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
trace: "on-first-retry",
permissions: ["clipboard-read", "clipboard-write"],
},
/* Configure projects for major browsers */
projects: [
{
name: "chromium",
use: { ...devices["Desktop Chrome"] },
},
],
/* Run your local dev server before starting the tests */
webServer: [
{
command: "pnpm dev",
url: "http://localhost:3000/",
reuseExistingServer: !isCI,
},
],
});

View File

@@ -1,5 +0,0 @@
const config = {
plugins: ["@tailwindcss/postcss"],
};
export default config;

View File

@@ -1,15 +0,0 @@
<svg
viewBox="0 0 386 146"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path id="text"
d="M176.725 33.865H188.275V22.7H176.725V33.865ZM164.9 129.4H172.875C182.72 129.4 188.275 123.9 188.275 114.22V43.6H176.725V109.545C176.725 115.65 173.975 118.51 167.925 118.51H164.9V129.4ZM245.298 53.28C241.613 45.47 233.363 41.95 222.748 41.95C208.998 41.95 200.748 48.44 197.888 58.615L208.613 61.915C210.648 55.315 216.368 52.565 222.638 52.565C231.933 52.565 235.673 56.415 236.058 64.61C226.433 65.93 216.643 67.195 209.768 69.23C200.583 72.145 195.743 77.865 195.743 86.83C195.743 96.51 202.673 104.65 215.818 104.65C225.443 104.65 232.318 101.35 237.213 94.365V103H247.388V66.425C247.388 61.475 247.168 57.185 245.298 53.28ZM217.853 95.245C210.483 95.245 207.128 91.34 207.128 86.72C207.128 82.045 210.593 79.515 215.323 77.92C220.328 76.435 226.983 75.5 235.948 74.18C235.893 76.93 235.673 80.725 234.738 83.475C233.418 89.25 227.643 95.245 217.853 95.245ZM251.22 103H301.545V92.715H269.535L303.195 45.47V43.6H254.3V53.885H284.935L251.22 101.185V103ZM304.815 103H355.14V92.715H323.13L356.79 45.47V43.6H307.895V53.885H338.53L304.815 101.185V103Z"
/>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M136.179 44.8277C136.179 44.8277 136.179 44.8277 136.179 44.8276V21.168C117.931 28.5527 97.9854 32.6192 77.0897 32.6192C65.1466 32.6192 53.5138 31.2908 42.331 28.7737V51.4076C42.331 51.4076 42.331 51.4076 42.331 51.4076V81.1508C41.2955 80.4385 40.1568 79.8458 38.9405 79.3915C36.1732 78.358 33.128 78.0876 30.1902 78.6145C27.2524 79.1414 24.5539 80.4419 22.4358 82.3516C20.3178 84.2613 18.8754 86.6944 18.291 89.3433C17.7066 91.9921 18.0066 94.7377 19.1528 97.2329C20.2991 99.728 22.2403 101.861 24.7308 103.361C27.2214 104.862 30.1495 105.662 33.1448 105.662H33.1455C33.6061 105.662 33.8365 105.662 34.0314 105.659C44.5583 105.449 53.042 96.9656 53.2513 86.4386C53.2534 86.3306 53.2544 86.2116 53.2548 86.0486H53.2552V85.7149L53.2552 85.5521V82.0762L53.2552 53.1993C61.0533 54.2324 69.0092 54.7656 77.0897 54.7656C77.6696 54.7656 78.2489 54.7629 78.8276 54.7574V110.696C77.792 109.983 76.6533 109.391 75.437 108.936C72.6697 107.903 69.6246 107.632 66.6867 108.159C63.7489 108.686 61.0504 109.987 58.9323 111.896C56.8143 113.806 55.3719 116.239 54.7875 118.888C54.2032 121.537 54.5031 124.283 55.6494 126.778C56.7956 129.273 58.7368 131.405 61.2273 132.906C63.7179 134.406 66.646 135.207 69.6414 135.207C70.1024 135.207 70.3329 135.207 70.5279 135.203C81.0548 134.994 89.5385 126.51 89.7478 115.983C89.7517 115.788 89.7517 115.558 89.7517 115.097V111.621L89.7517 54.3266C101.962 53.4768 113.837 51.4075 125.255 48.2397V80.9017C124.219 80.1894 123.081 79.5966 121.864 79.1424C119.097 78.1089 116.052 77.8384 113.114 78.3653C110.176 78.8922 107.478 80.1927 105.36 82.1025C103.242 84.0122 101.799 86.4453 101.215 89.0941C100.631 91.743 100.931 94.4886 102.077 96.9837C103.223 99.4789 105.164 101.612 107.655 103.112C110.145 104.612 113.073 105.413 116.069 105.413C116.53 105.413 116.76 105.413 116.955 105.409C127.482 105.2 135.966 96.7164 136.175 86.1895C136.179 85.9945 136.179 85.764 136.179 85.3029V81.8271L136.179 44.8277Z"
fill="#146AFF"
/>
</svg>

Before

Width:  |  Height:  |  Size: 3.2 KiB

Some files were not shown because too many files have changed in this diff Show More