Compare commits

..

3 Commits

Author          SHA1        Message                              Date
Giordano Ricci  8233449e83  fix expo instructions                2025-05-19 17:22:58 +01:00
Giordano Ricci  7c020d45a9  fix typo, add benefits               2025-05-19 17:02:08 +01:00
Giordano Ricci  9197ec64c7  docs: add cloud auth specific docs   2025-05-19 15:54:37 +01:00
1858 changed files with 83409 additions and 114102 deletions

View File

@@ -2,24 +2,42 @@
"$schema": "https://unpkg.com/@changesets/config@2.3.1/schema.json",
"changelog": "@changesets/cli/changelog",
"commit": false,
"linked": [],
"fixed": [
"fixed": [],
"linked": [
[
"cojson",
"cojson-core-wasm",
"cojson-storage",
"cojson-storage-indexeddb",
"cojson-storage-sqlite",
"cojson-transport-ws",
"jazz-browser",
"jazz-auth-clerk",
"jazz-auth-betterauth",
"jazz-betterauth-client-plugin",
"jazz-betterauth-server-plugin",
"jazz-react-auth-betterauth",
"jazz-browser-media-images",
"jazz-expo",
"jazz-inspector",
"jazz-inspector-element",
"jazz-nodejs",
"jazz-react",
"jazz-react-core",
"jazz-react-auth-clerk",
"jazz-react-native-core",
"jazz-react-native",
"jazz-react-native-media-images",
"jazz-run",
"jazz-svelte",
"jazz-tools",
"community-jazz-vue"
"jazz-vue"
]
],
"access": "public",
"baseBranch": "main",
"updateInternalDependencies": "minor"
"updateInternalDependencies": "patch",
"ignore": [],
"___experimentalUnsafeOptions_WILL_CHANGE_IN_PATCH": {
"onlyUpdatePeerDependentsWhenOutOfRange": true
}
}

.github/CODEOWNERS vendored (8 changed lines)
View File

@@ -1,8 +0,0 @@
./packages @garden-co/framework
./tests @garden-co/framework
./packages/quint-ui @garden-co/ui
./homepage @garden-co/ui
./homepage/homepage/content/docs @garden-co/docs
./starters @garden-co/docs
./examples @garden-co/docs @garden-co/ui

View File

@@ -1,23 +0,0 @@
# Description
<!-- Please include a summary of the change and which issue is fixed -->
<!-- Please also include relevant motivation and context -->
<!-- Include any links to documentation like RFCs if necessary -->
<!-- Add a link to relevant preview environments or anything that would simplify the visual review process -->
<!-- Supplemental screenshots and video are encouraged, but the primary description should be in text -->
## Manual testing instructions
<!-- Add any actions required to manually test the changes -->
## Tests
- [ ] Tests have been added and/or updated
- [ ] Tests have not been updated, because: <!-- Insert reason for not updating tests here -->
- [ ] I need help with writing tests
## Checklist
- [ ] I've updated the parts of the docs that are affected by the PR changes
- [ ] I've generated a changeset, if a version bump is required
- [ ] I've updated the JSDoc comments for the public APIs I've modified, or added them where missing

.github/workflows/build-examples.yaml vendored Normal file (37 changed lines)
View File

@@ -0,0 +1,37 @@
name: Build Examples
on:
push:
branches: [ "main" ]
jobs:
build-examples:
runs-on: blacksmith-4vcpu-ubuntu-2204
strategy:
matrix:
example: [
"chat",
"clerk",
"passkey",
"inspector",
"music-player",
"password-manager",
"pets",
"reactions",
"todo",
]
steps:
- name: Checkout
uses: actions/checkout@v4
with:
submodules: true
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Pnpm Build
run: |
pnpm install
pnpm turbo build;
working-directory: ./examples/${{ matrix.example }}

.github/workflows/build-starters.yaml vendored Normal file (26 changed lines)
View File

@@ -0,0 +1,26 @@
name: Build Starters
on:
push:
branches: ["main"]
jobs:
build-starters:
runs-on: blacksmith-4vcpu-ubuntu-2204
strategy:
matrix:
starter: ["react-passkey-auth"]
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Pnpm Build
run: |
pnpm install
pnpm turbo build;
working-directory: ./starters/${{ matrix.starter }}

View File

@@ -1,27 +1,21 @@
name: Code quality
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
push:
branches:
- "main"
pull_request:
jobs:
quality:
runs-on: blacksmith-2vcpu-ubuntu-2404-arm
runs-on: blacksmith-4vcpu-ubuntu-2204
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Biome
uses: biomejs/setup-biome@v2
with:
version: 2.1.3
version: latest
- name: Run Biome
run: biome ci .
- name: Check Catalog Dependencies
run: node scripts/check-catalog-deps.js
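The concurrency group above uses the GitHub Actions `&&`/`||` idiom as a ternary: pushes are keyed by commit SHA (every push keeps its own run), while other events are keyed by ref (so a newer run cancels the in-flight one). A minimal TypeScript sketch of that selection, with an illustrative function name:

```typescript
// Sketch of how the concurrency group key is derived.
function concurrencyGroup(
  workflow: string,
  eventName: string,
  sha: string,
  ref: string,
): string {
  // Mirrors `${{ github.event_name == 'push' && github.sha || github.ref }}`.
  const key = eventName === "push" ? sha : ref;
  return `${workflow}-${key}`;
}

// concurrencyGroup("Code quality", "pull_request", "8233449e83", "refs/pull/42/merge")
// -> "Code quality-refs/pull/42/merge", so a newer run for the same PR cancels this one.
```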

View File

@@ -1,77 +0,0 @@
name: Test `create-jazz-app` Distribution
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened]
paths:
- 'packages/create-jazz-app/**'
jobs:
test-create-jazz-app-distribution:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Build create-jazz-app
run: pnpm build
working-directory: packages/create-jazz-app
- name: Pack create-jazz-app
run: pnpm pack
working-directory: packages/create-jazz-app
- name: Create test directory
run: mkdir -p /tmp/test-create-jazz-app
- name: Initialize test package
run: |
cd /tmp/test-create-jazz-app
bun init -y
- name: Install packed create-jazz-app
run: |
cd /tmp/test-create-jazz-app
bun install ${{ github.workspace }}/packages/create-jazz-app/create-jazz-app-*.tgz
- name: Test basic functionality
run: |
cd /tmp/test-create-jazz-app
bunx create-jazz-app --help
- name: Create test project and validate catalog resolution
run: |
cd /tmp/test-create-jazz-app
mkdir test-project
cd test-project
echo -e "\n\n\n\n\n\n\n\n" | bunx create-jazz-app . --framework react --starter react-passkey-auth --package-manager bun --git false
- name: Validate no unresolved catalog references
run: |
cd /tmp/test-create-jazz-app/test-project
# Check for unresolved catalog: references in package.json
if grep -r "catalog:" package.json; then
echo "❌ Found unresolved catalog: references in generated project"
exit 1
fi
# Check for unresolved workspace: references
if grep -r "workspace:" package.json; then
echo "❌ Found unresolved workspace: references in generated project"
exit 1
fi
echo "✅ All catalog and workspace references resolved successfully"

View File

@@ -1,11 +1,5 @@
name: End-to-End Tests for React Native
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened]
@@ -44,6 +38,7 @@ jobs:
- name: chat-rn-expo App Pre Build
working-directory: ./examples/chat-rn-expo
run: |
pnpm build
pnpm expo prebuild --clean
- name: Install Maestro
@@ -66,8 +61,7 @@ jobs:
emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none -no-metrics
disable-animations: true
working-directory: ./examples/chat-rn-expo/
# killall due to this issue: https://github.com/ReactiveCircus/android-emulator-runner/issues/385
script: ./test/e2e/run.sh && ( killall -INT crashpad_handler || true )
script: ./test/e2e/run.sh
- name: Copy Maestro Output
if: steps.e2e_test.outcome != 'success'

View File

@@ -1,11 +1,5 @@
name: Jazz Run Tests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
push:
branches: ["main"]
@@ -14,7 +8,7 @@ on:
jobs:
test:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: blacksmith-4vcpu-ubuntu-2204
timeout-minutes: 5
steps:

View File

@@ -1,11 +1,5 @@
name: Playwright Tests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
push:
branches: ["main"]
@@ -15,11 +9,11 @@ on:
jobs:
test:
timeout-minutes: 60
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: blacksmith-4vcpu-ubuntu-2204
continue-on-error: true
strategy:
matrix:
shard: ["1/2", "2/2"]
project: ["tests/e2e", "examples/chat", "examples/clerk", "examples/betterauth", "examples/file-share-svelte", "examples/form", "examples/music-player", "examples/organization", "examples/pets", "starters/react-passkey-auth"]
steps:
- uses: actions/checkout@v4
@@ -29,131 +23,21 @@ jobs:
- name: Setup Source Code
uses: ./.github/actions/source-code/
- name: Pnpm Build
run: pnpm turbo build
working-directory: ./${{ matrix.project }}
- name: Install Playwright Browsers
run: pnpm exec playwright install
working-directory: ./${{ matrix.project }}
- name: Run Playwright tests for shard ${{ matrix.shard }}
run: |
# Parse shard information (e.g., "1/2" -> shard_num=1, total_shards=2)
IFS='/' read -r shard_num total_shards <<< "${{ matrix.shard }}"
shard_index=$((shard_num - 1)) # Convert to 0-based index
# Debug: Print parsed values
echo "Parsed shard_num: $shard_num"
echo "Parsed total_shards: $total_shards"
echo "Calculated shard_index: $shard_index"
# Define all projects to test
all_projects=(
"tests/e2e"
"examples/chat"
"examples/chat-svelte"
"examples/community-clerk-vue"
"examples/clerk"
"examples/betterauth"
"examples/file-share-svelte"
"examples/form"
"examples/inspector"
"examples/music-player"
"examples/organization"
"examples/server-worker-http"
"starters/react-passkey-auth"
"starters/svelte-passkey-auth"
"tests/jazz-svelte"
)
# Calculate which projects this shard should run
shard_projects=()
for i in "${!all_projects[@]}"; do
if [ $((i % total_shards)) -eq $shard_index ]; then
shard_projects+=("${all_projects[i]}")
fi
done
# Track project results
overall_exit_code=0
failed_projects=()
passed_projects=()
echo "=== Running tests for shard ${{ matrix.shard }} ==="
echo "Projects in this shard:"
printf '%s\n' "${shard_projects[@]}"
echo
# Run tests for each project
for project in "${shard_projects[@]}"; do
echo "=== Testing project: $project ==="
# Check if project directory exists
if [ ! -d "$project" ]; then
echo "❌ FAILED: Project directory $project does not exist"
failed_projects+=("$project (directory not found)")
overall_exit_code=1
continue
fi
# Check if project has package.json
if [ ! -f "$project/package.json" ]; then
echo "❌ FAILED: No package.json found in $project"
failed_projects+=("$project (no package.json)")
overall_exit_code=1
continue
fi
# Build the project
echo "🔨 Building $project..."
cd "$project"
if [ -f .env.test ]; then
cp .env.test .env
fi
if ! pnpm turbo build; then
echo "❌ BUILD FAILED: $project"
failed_projects+=("$project (build failed)")
overall_exit_code=1
cd - > /dev/null
continue
fi
# Run Playwright tests
echo "🧪 Running Playwright tests for $project..."
if ! pnpm exec playwright test; then
echo "❌ TESTS FAILED: $project"
failed_projects+=("$project (tests failed)")
overall_exit_code=1
else
echo "✅ TESTS PASSED: $project"
passed_projects+=("$project")
fi
cd - > /dev/null
echo "=== Finished testing $project ==="
echo
done
# Print summary report
echo "=========================================="
echo "📊 TEST SUMMARY FOR SHARD ${{ matrix.shard }}"
echo "=========================================="
if [ ${#passed_projects[@]} -gt 0 ]; then
echo "✅ PASSED (${#passed_projects[@]}):"
printf ' - %s\n' "${passed_projects[@]}"
echo
fi
if [ ${#failed_projects[@]} -gt 0 ]; then
echo "❌ FAILED (${#failed_projects[@]}):"
printf ' - %s\n' "${failed_projects[@]}"
echo
fi
echo "Total projects in shard: ${#shard_projects[@]}"
echo "Passed: ${#passed_projects[@]}"
echo "Failed: ${#failed_projects[@]}"
echo "=========================================="
# Exit with overall status
exit $overall_exit_code
- name: Run Playwright tests
run: pnpm exec playwright test
working-directory: ./${{ matrix.project }}
- uses: actions/upload-artifact@v4
if: failure()
with:
name: ${{ hashFiles(format('{0}/package.json', matrix.project)) }}-playwright-report
path: ./${{ matrix.project }}/playwright-report/
retention-days: 30
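The shell script removed above spreads projects over shards by index modulo the shard count. A minimal TypeScript sketch of that assignment, using the same "1/2" shard notation:

```typescript
// Project i belongs to the shard where i % totalShards === shardIndex,
// exactly as in the removed bash loop.
function projectsForShard(shard: string, allProjects: string[]): string[] {
  const [shardNum, totalShards] = shard.split("/").map(Number); // "1/2" -> [1, 2]
  const shardIndex = shardNum - 1; // 0-based, as in the script
  return allProjects.filter((_, i) => i % totalShards === shardIndex);
}

// projectsForShard("1/2", ["tests/e2e", "examples/chat", "examples/clerk", "examples/form"])
// -> ["tests/e2e", "examples/clerk"]
```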

View File

@@ -1,11 +1,4 @@
name: Pre-Publish tagged Pull Requests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened, labeled]

View File

@@ -17,7 +17,7 @@ concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
release:
name: Release
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: blacksmith-4vcpu-ubuntu-2204
steps:
- name: Checkout Repo
uses: actions/checkout@v4

View File

@@ -1,11 +1,5 @@
name: Unit Tests
concurrency:
# For pushes, this lets concurrent runs happen, so each push gets a result.
# But for other events (e.g. PRs), we can cancel the previous runs.
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened]
@@ -15,7 +9,7 @@ on:
jobs:
unit-tests:
runs-on: blacksmith-4vcpu-ubuntu-2404
runs-on: blacksmith-4vcpu-ubuntu-2204
steps:
- name: Checkout

.gitignore vendored (3 changed lines)
View File

@@ -20,9 +20,6 @@ __screenshots__
# Playwright
test-results
# Java
.java-version
.husky
.vscode/*

View File

@@ -63,7 +63,7 @@ You'll need Node.js 22.x installed (we're working on support for 23.x), and pnpm
4. **Build the packages**:
```bash
pnpm build:packages
pnpm build
```
5. **Run tests** to verify everything is working:

View File

@@ -1,171 +0,0 @@
import { describe, bench } from "vitest";
import * as tools from "jazz-tools";
import * as toolsLatest from "jazz-tools-latest";
import { WasmCrypto } from "cojson/crypto/WasmCrypto";
import { WasmCrypto as WasmCryptoLatest } from "cojson-latest/crypto/WasmCrypto";
import { PureJSCrypto } from "cojson/crypto/PureJSCrypto";
import { PureJSCrypto as PureJSCryptoLatest } from "cojson-latest/crypto/PureJSCrypto";
const sampleReactions = ["👍", "❤️", "😄", "🎉"];
const sampleHiddenIn = ["user1", "user2", "user3"];
// Define the schemas based on the provided Message schema
async function createSchema(
tools: typeof toolsLatest,
WasmCrypto: typeof WasmCryptoLatest,
) {
const Embed = tools.co.map({
url: tools.z.string(),
title: tools.z.string().optional(),
description: tools.z.string().optional(),
image: tools.z.string().optional(),
});
const Message = tools.co.map({
content: tools.z.string(),
createdAt: tools.z.date(),
updatedAt: tools.z.date(),
hiddenIn: tools.co.list(tools.z.string()),
replyTo: tools.z.string().optional(),
reactions: tools.co.list(tools.z.string()),
softDeleted: tools.z.boolean().optional(),
embeds: tools.co.optional(tools.co.list(Embed)),
author: tools.z.string().optional(),
threadId: tools.z.string().optional(),
});
const ctx = await tools.createJazzContextForNewAccount({
creationProps: {
name: "Test Account",
},
// @ts-expect-error
crypto: await WasmCrypto.create(),
});
return {
Message,
sampleReactions,
sampleHiddenIn,
Group: tools.Group,
account: ctx.account,
};
}
const PUREJS = false;
// @ts-expect-error
const schema = await createSchema(tools, PUREJS ? PureJSCrypto : WasmCrypto);
const schemaLatest = await createSchema(
toolsLatest,
// @ts-expect-error
PUREJS ? PureJSCryptoLatest : WasmCryptoLatest,
);
const message = schema.Message.create(
{
content: "A".repeat(1024),
createdAt: new Date(),
updatedAt: new Date(),
hiddenIn: sampleHiddenIn,
reactions: sampleReactions,
author: "user123",
},
schema.Group.create(schema.account).makePublic(),
);
const content = await tools.exportCoValue(schema.Message, message.id, {
// @ts-expect-error
loadAs: schema.account,
});
tools.importContentPieces(content ?? [], schema.account as any);
toolsLatest.importContentPieces(content ?? [], schemaLatest.account);
schema.account._raw.core.node.internalDeleteCoValue(message.id as any);
schemaLatest.account._raw.core.node.internalDeleteCoValue(message.id as any);
describe("Message.create", () => {
bench(
"current version",
() => {
schema.Message.create(
{
content: "A".repeat(1024),
createdAt: new Date(),
updatedAt: new Date(),
hiddenIn: sampleHiddenIn,
reactions: sampleReactions,
author: "user123",
},
schema.Group.create(schema.account),
);
},
{ iterations: 1000 },
);
bench(
"Jazz 0.17.9",
() => {
schemaLatest.Message.create(
{
content: "A".repeat(1024),
createdAt: new Date(),
updatedAt: new Date(),
hiddenIn: sampleHiddenIn,
reactions: sampleReactions,
author: "user123",
},
schemaLatest.Group.create(schemaLatest.account),
);
},
{ iterations: 1000 },
);
});
describe("Message import", () => {
bench(
"current version",
() => {
tools.importContentPieces(content ?? [], schema.account as any);
schema.account._raw.core.node.internalDeleteCoValue(message.id as any);
},
{ iterations: 5000 },
);
bench(
"Jazz 0.17.9",
() => {
toolsLatest.importContentPieces(content ?? [], schemaLatest.account);
schemaLatest.account._raw.core.node.internalDeleteCoValue(
message.id as any,
);
},
{ iterations: 5000 },
);
});
describe("import+ decrypt", () => {
bench(
"current version",
() => {
tools.importContentPieces(content ?? [], schema.account as any);
const node = schema.account._raw.core.node;
node.expectCoValueLoaded(message.id as any).getCurrentContent();
node.internalDeleteCoValue(message.id as any);
},
{ iterations: 5000 },
);
bench(
"Jazz 0.17.9",
() => {
toolsLatest.importContentPieces(content ?? [], schemaLatest.account);
const node = schemaLatest.account._raw.core.node;
node.expectCoValueLoaded(message.id as any).getCurrentContent();
node.internalDeleteCoValue(message.id as any);
},
{ iterations: 5000 },
);
});

View File

@@ -1,14 +0,0 @@
{
"name": "jazz-tools-benchmark",
"private": true,
"type": "module",
"dependencies": {
"cojson": "workspace:*",
"jazz-tools": "workspace:*",
"cojson-latest": "npm:cojson@0.17.9",
"jazz-tools-latest": "npm:jazz-tools@0.17.9"
},
"scripts": {
"bench": "vitest bench"
}
}

View File

@@ -1,7 +0,0 @@
import { defineProject } from "vitest/config";
export default defineProject({
test: {
name: "bench",
},
});

View File

@@ -1,5 +1,5 @@
{
"$schema": "https://biomejs.dev/schemas/2.1.3/schema.json",
"$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",
@@ -7,36 +7,38 @@
},
"files": {
"ignoreUnknown": false,
"includes": [
"**",
"!crates/**",
"!**/jazz-tools.json",
"!**/ios/**",
"!**/android/**",
"!**/tests/jazz-svelte/src/**",
"!**/examples/**/*svelte*/**",
"!**/starters/**/*svelte*/**",
"!**/examples/server-worker-inbox/src/routeTree.gen.ts",
"!**/homepage/homepage/**",
"!**/package.json",
"!**/*svelte*/**"
"ignore": [
"jazz-tools.json",
"**/ios/**",
"**/android/**",
"packages/jazz-svelte/**",
"examples/*svelte*/**",
"examples/jazz-paper-scissors/src/routeTree.gen.ts",
"homepage/homepage/**",
"**/package.json"
]
},
"formatter": {
"enabled": true,
"indentStyle": "space"
},
"assist": { "actions": { "source": { "organizeImports": "off" } } },
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": false,
"rules": {
"recommended": true,
"correctness": {
"useExhaustiveDependencies": "off",
"useImportExtensions": {
"level": "error",
"options": {
"forceJsExtensions": true
"suggestedExtensions": {
"ts": {
"module": "js",
"component": "jsx"
}
}
}
}
}
@@ -44,16 +46,7 @@
},
"overrides": [
{
"includes": ["packages/community-jazz-vue/src/**"],
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
}
},
{
"includes": ["**/packages/**/src/**"],
"include": ["packages/**/src/**"],
"linter": {
"enabled": true,
"rules": {
@@ -62,10 +55,7 @@
}
},
{
"includes": [
"**/packages/cojson/src/storage/**/*/**",
"**/cojson-transport-ws/**"
],
"include": ["packages/cojson-storage*/**", "cojson-transport-ws/**"],
"linter": {
"enabled": true,
"rules": {
@@ -74,7 +64,7 @@
}
},
{
"includes": ["**/tests/**"],
"include": ["packages/**/src/tests/**"],
"linter": {
"rules": {
"correctness": {
@@ -84,7 +74,7 @@
"noNonNullAssertion": "off"
},
"suspicious": {
"noExplicitAny": "off"
"noExplicitAny": "info"
}
}
}

crates/.gitignore vendored (8 changed lines)
View File

@@ -1,8 +0,0 @@
# Rust
/target
# Test artifacts
lzy/compressed_66k.lzy
# OS generated files
.DS_Store

crates/Cargo.lock generated (1164 changed lines)

File diff suppressed because it is too large

View File

@@ -1,7 +0,0 @@
[workspace]
resolver = "2"
members = [
"lzy",
"cojson-core",
"cojson-core-wasm",
]

View File

@@ -1,5 +0,0 @@
# cojson-core-wasm
## 0.17.11
## 0.17.10

View File

@@ -1,29 +0,0 @@
[package]
name = "cojson-core-wasm"
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
cojson-core = { path = "../cojson-core" }
wasm-bindgen = "0.2"
console_error_panic_hook = { version = "0.1.7", optional = true }
ed25519-dalek = { version = "2.2.0", default-features = false, features = ["rand_core"] }
serde_json = "1.0"
serde-wasm-bindgen = "0.6"
serde = { version = "1.0", features = ["derive"] }
js-sys = "0.3"
getrandom = { version = "0.2", features = ["js"] }
thiserror = "1.0"
hex = "0.4"
blake3 = "1.5"
x25519-dalek = { version = "2.0", features = ["getrandom", "static_secrets"] }
crypto_secretbox = { version = "0.1.1", features = ["getrandom"] }
salsa20 = "0.10.2"
rand = "0.8"
bs58 = "0.5"
[features]
default = ["console_error_panic_hook"]

View File

@@ -1,26 +0,0 @@
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
mkdirSync("./public", { recursive: true });
// Embed the compiled wasm binary as a base64 data URL so consumers can load it
// without fetching a separate .wasm file.
const wasm = readFileSync("./pkg/cojson_core_wasm_bg.wasm");
writeFileSync(
"./public/cojson_core_wasm.wasm.js",
`export const data = "data:application/wasm;base64,${wasm.toString("base64")}";`,
);
writeFileSync(
"./public/cojson_core_wasm.wasm.d.ts",
"export const data: string;",
);
// Patch the wasm-pack glue so it never falls back to resolving the .wasm file by URL;
// callers must pass the embedded data URL explicitly (see index.js).
const glueJs = readFileSync("./pkg/cojson_core_wasm.js", "utf8").replace(
"module_or_path = new URL('cojson_core_wasm_bg.wasm', import.meta.url);",
"throw new Error();",
);
writeFileSync("./public/cojson_core_wasm.js", glueJs);
writeFileSync(
"./public/cojson_core_wasm.d.ts",
readFileSync("./pkg/cojson_core_wasm.d.ts", "utf8"),
);

View File

@@ -1,3 +0,0 @@
export * from "./public/cojson_core_wasm.js";
export async function initialize(): Promise<void>;

View File

@@ -1,8 +0,0 @@
export * from "./public/cojson_core_wasm.js";
import __wbg_init from "./public/cojson_core_wasm.js";
import { data } from "./public/cojson_core_wasm.wasm.js";
export async function initialize() {
return await __wbg_init({ module_or_path: data });
}

View File

@@ -1,22 +0,0 @@
{
"name": "cojson-core-wasm",
"type": "module",
"version": "0.17.11",
"files": [
"public/cojson_core_wasm.js",
"public/cojson_core_wasm.d.ts",
"public/cojson_core_wasm.wasm.js",
"public/cojson_core_wasm.wasm.d.ts",
"index.js",
"index.d.ts"
],
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"build:wasm": "wasm-pack build --release --target web && node build.js",
"build:dev": "wasm-pack build --dev --target web && node build.js"
},
"devDependencies": {
"wasm-pack": "^0.13.1"
}
}

View File

@@ -1,291 +0,0 @@
/* tslint:disable */
/* eslint-disable */
/**
* WASM-exposed function for XSalsa20 encryption without authentication.
* - `key`: 32-byte key for encryption
* - `nonce_material`: Raw bytes used to generate a 24-byte nonce via BLAKE3
* - `plaintext`: Raw bytes to encrypt
* Returns the encrypted bytes or throws a JsError if encryption fails.
* Note: This function does not provide authentication. Use encrypt_xsalsa20_poly1305 for authenticated encryption.
*/
export function encrypt_xsalsa20(key: Uint8Array, nonce_material: Uint8Array, plaintext: Uint8Array): Uint8Array;
/**
* WASM-exposed function for XSalsa20 decryption without authentication.
* - `key`: 32-byte key for decryption (must match encryption key)
* - `nonce_material`: Raw bytes used to generate a 24-byte nonce (must match encryption)
* - `ciphertext`: Encrypted bytes to decrypt
* Returns the decrypted bytes or throws a JsError if decryption fails.
* Note: This function does not provide authentication. Use decrypt_xsalsa20_poly1305 for authenticated decryption.
*/
export function decrypt_xsalsa20(key: Uint8Array, nonce_material: Uint8Array, ciphertext: Uint8Array): Uint8Array;
/**
* Generate a new Ed25519 signing key using secure random number generation.
* Returns 32 bytes of raw key material suitable for use with other Ed25519 functions.
*/
export function new_ed25519_signing_key(): Uint8Array;
/**
* WASM-exposed function to derive an Ed25519 verifying key from a signing key.
* - `signing_key`: 32 bytes of signing key material
* Returns 32 bytes of verifying key material or throws JsError if key is invalid.
*/
export function ed25519_verifying_key(signing_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to sign a message using Ed25519.
* - `signing_key`: 32 bytes of signing key material
* - `message`: Raw bytes to sign
* Returns 64 bytes of signature material or throws JsError if signing fails.
*/
export function ed25519_sign(signing_key: Uint8Array, message: Uint8Array): Uint8Array;
/**
* WASM-exposed function to verify an Ed25519 signature.
* - `verifying_key`: 32 bytes of verifying key material
* - `message`: Raw bytes that were signed
* - `signature`: 64 bytes of signature material
* Returns true if signature is valid, false otherwise, or throws JsError if verification fails.
*/
export function ed25519_verify(verifying_key: Uint8Array, message: Uint8Array, signature: Uint8Array): boolean;
/**
* WASM-exposed function to validate and copy Ed25519 signing key bytes.
* - `bytes`: 32 bytes of signing key material to validate
* Returns the same 32 bytes if valid or throws JsError if invalid.
*/
export function ed25519_signing_key_from_bytes(bytes: Uint8Array): Uint8Array;
/**
* WASM-exposed function to derive the public key from an Ed25519 signing key.
* - `signing_key`: 32 bytes of signing key material
* Returns 32 bytes of public key material or throws JsError if key is invalid.
*/
export function ed25519_signing_key_to_public(signing_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to sign a message with an Ed25519 signing key.
* - `signing_key`: 32 bytes of signing key material
* - `message`: Raw bytes to sign
* Returns 64 bytes of signature material or throws JsError if signing fails.
*/
export function ed25519_signing_key_sign(signing_key: Uint8Array, message: Uint8Array): Uint8Array;
/**
* WASM-exposed function to validate and copy Ed25519 verifying key bytes.
* - `bytes`: 32 bytes of verifying key material to validate
* Returns the same 32 bytes if valid or throws JsError if invalid.
*/
export function ed25519_verifying_key_from_bytes(bytes: Uint8Array): Uint8Array;
/**
* WASM-exposed function to validate and copy Ed25519 signature bytes.
* - `bytes`: 64 bytes of signature material to validate
* Returns the same 64 bytes if valid or throws JsError if invalid.
*/
export function ed25519_signature_from_bytes(bytes: Uint8Array): Uint8Array;
/**
* WASM-exposed function to sign a message using Ed25519.
* - `message`: Raw bytes to sign
* - `secret`: Raw Ed25519 signing key bytes
* Returns base58-encoded signature with "signature_z" prefix or throws JsError if signing fails.
*/
export function sign(message: Uint8Array, secret: Uint8Array): string;
/**
* WASM-exposed function to verify an Ed25519 signature.
* - `signature`: Raw signature bytes
* - `message`: Raw bytes that were signed
* - `id`: Raw Ed25519 verifying key bytes
* Returns true if signature is valid, false otherwise, or throws JsError if verification fails.
*/
export function verify(signature: Uint8Array, message: Uint8Array, id: Uint8Array): boolean;
/**
* WASM-exposed function to derive a signer ID from a signing key.
* - `secret`: Raw Ed25519 signing key bytes
* Returns base58-encoded verifying key with "signer_z" prefix or throws JsError if derivation fails.
*/
export function get_signer_id(secret: Uint8Array): string;
/**
* Generate a 24-byte nonce from input material using BLAKE3.
* - `nonce_material`: Raw bytes to derive the nonce from
* Returns 24 bytes suitable for use as a nonce in cryptographic operations.
* This function is deterministic - the same input will produce the same nonce.
*/
export function generate_nonce(nonce_material: Uint8Array): Uint8Array;
/**
* Hash data once using BLAKE3.
* - `data`: Raw bytes to hash
* Returns 32 bytes of hash output.
* This is the simplest way to compute a BLAKE3 hash of a single piece of data.
*/
export function blake3_hash_once(data: Uint8Array): Uint8Array;
/**
* Hash data once using BLAKE3 with a context prefix.
* - `data`: Raw bytes to hash
* - `context`: Context bytes to prefix to the data
* Returns 32 bytes of hash output.
* This is useful for domain separation - the same data hashed with different contexts will produce different outputs.
*/
export function blake3_hash_once_with_context(data: Uint8Array, context: Uint8Array): Uint8Array;
/**
* Get an empty BLAKE3 state for incremental hashing.
* Returns a new Blake3Hasher instance for incremental hashing.
*/
export function blake3_empty_state(): Blake3Hasher;
/**
* Update a BLAKE3 state with new data for incremental hashing.
* - `state`: Current Blake3Hasher instance
* - `data`: New data to incorporate into the hash
 * Updates the given Blake3Hasher in place; the function itself returns nothing.
*/
export function blake3_update_state(state: Blake3Hasher, data: Uint8Array): void;
/**
* Get the final hash from a BLAKE3 state.
* - `state`: The Blake3Hasher to finalize
* Returns 32 bytes of hash output.
* This finalizes an incremental hashing operation.
*/
export function blake3_digest_for_state(state: Blake3Hasher): Uint8Array;
/**
* Generate a new X25519 private key using secure random number generation.
* Returns 32 bytes of raw key material suitable for use with other X25519 functions.
* This key can be reused for multiple Diffie-Hellman exchanges.
*/
export function new_x25519_private_key(): Uint8Array;
/**
* WASM-exposed function to derive an X25519 public key from a private key.
* - `private_key`: 32 bytes of private key material
* Returns 32 bytes of public key material or throws JsError if key is invalid.
*/
export function x25519_public_key(private_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to perform X25519 Diffie-Hellman key exchange.
* - `private_key`: 32 bytes of private key material
* - `public_key`: 32 bytes of public key material
* Returns 32 bytes of shared secret material or throws JsError if key exchange fails.
*/
export function x25519_diffie_hellman(private_key: Uint8Array, public_key: Uint8Array): Uint8Array;
/**
* WASM-exposed function to derive a sealer ID from a sealer secret.
* - `secret`: Raw bytes of the sealer secret
* Returns a base58-encoded sealer ID with "sealer_z" prefix or throws JsError if derivation fails.
*/
export function get_sealer_id(secret: Uint8Array): string;
/**
* WASM-exposed function for sealing a message using X25519 + XSalsa20-Poly1305.
* Provides authenticated encryption with perfect forward secrecy.
* - `message`: Raw bytes to seal
* - `sender_secret`: Base58-encoded sender's private key with "sealerSecret_z" prefix
* - `recipient_id`: Base58-encoded recipient's public key with "sealer_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce
* Returns sealed bytes or throws JsError if sealing fails.
*/
export function seal(message: Uint8Array, sender_secret: string, recipient_id: string, nonce_material: Uint8Array): Uint8Array;
/**
* WASM-exposed function for unsealing a message using X25519 + XSalsa20-Poly1305.
* Provides authenticated decryption with perfect forward secrecy.
* - `sealed_message`: The sealed bytes to decrypt
* - `recipient_secret`: Base58-encoded recipient's private key with "sealerSecret_z" prefix
* - `sender_id`: Base58-encoded sender's public key with "sealer_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce (must match sealing)
* Returns unsealed bytes or throws JsError if unsealing fails.
*/
export function unseal(sealed_message: Uint8Array, recipient_secret: string, sender_id: string, nonce_material: Uint8Array): Uint8Array;
/**
* WASM-exposed function to encrypt bytes with a key secret and nonce material.
* - `value`: The raw bytes to encrypt
* - `key_secret`: A base58-encoded key secret with "keySecret_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce
* Returns the encrypted bytes or throws a JsError if encryption fails.
*/
export function encrypt(value: Uint8Array, key_secret: string, nonce_material: Uint8Array): Uint8Array;
/**
* WASM-exposed function to decrypt bytes with a key secret and nonce material.
* - `ciphertext`: The encrypted bytes to decrypt
* - `key_secret`: A base58-encoded key secret with "keySecret_z" prefix
* - `nonce_material`: Raw bytes used to generate the nonce (must match encryption)
* Returns the decrypted bytes or throws a JsError if decryption fails.
*/
export function decrypt(ciphertext: Uint8Array, key_secret: string, nonce_material: Uint8Array): Uint8Array;
export class Blake3Hasher {
free(): void;
constructor();
update(data: Uint8Array): void;
finalize(): Uint8Array;
clone(): Blake3Hasher;
}
export class SessionLog {
free(): void;
constructor(co_id: string, session_id: string, signer_id?: string | null);
clone(): SessionLog;
tryAdd(transactions_json: string[], new_signature_str: string, skip_verify: boolean): void;
addNewPrivateTransaction(changes_json: string, signer_secret: string, encryption_key: string, key_id: string, made_at: number): string;
addNewTrustingTransaction(changes_json: string, signer_secret: string, made_at: number): string;
decryptNextTransactionChangesJson(tx_index: number, encryption_key: string): string;
}
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
export interface InitOutput {
readonly memory: WebAssembly.Memory;
readonly decrypt_xsalsa20: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly encrypt_xsalsa20: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly __wbg_sessionlog_free: (a: number, b: number) => void;
readonly sessionlog_new: (a: number, b: number, c: number, d: number, e: number, f: number) => number;
readonly sessionlog_clone: (a: number) => number;
readonly sessionlog_tryAdd: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number];
readonly sessionlog_addNewPrivateTransaction: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => [number, number, number, number];
readonly sessionlog_addNewTrustingTransaction: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly sessionlog_decryptNextTransactionChangesJson: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly new_ed25519_signing_key: () => [number, number];
readonly ed25519_sign: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly ed25519_verify: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number];
readonly ed25519_signing_key_from_bytes: (a: number, b: number) => [number, number, number, number];
readonly ed25519_signing_key_to_public: (a: number, b: number) => [number, number, number, number];
readonly ed25519_verifying_key_from_bytes: (a: number, b: number) => [number, number, number, number];
readonly ed25519_signature_from_bytes: (a: number, b: number) => [number, number, number, number];
readonly ed25519_verifying_key: (a: number, b: number) => [number, number, number, number];
readonly ed25519_signing_key_sign: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly sign: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly verify: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number];
readonly get_signer_id: (a: number, b: number) => [number, number, number, number];
readonly generate_nonce: (a: number, b: number) => [number, number];
readonly blake3_hash_once: (a: number, b: number) => [number, number];
readonly blake3_hash_once_with_context: (a: number, b: number, c: number, d: number) => [number, number];
readonly __wbg_blake3hasher_free: (a: number, b: number) => void;
readonly blake3hasher_finalize: (a: number) => [number, number];
readonly blake3hasher_clone: (a: number) => number;
readonly blake3_empty_state: () => number;
readonly blake3_update_state: (a: number, b: number, c: number) => void;
readonly blake3_digest_for_state: (a: number) => [number, number];
readonly blake3hasher_update: (a: number, b: number, c: number) => void;
readonly blake3hasher_new: () => number;
readonly new_x25519_private_key: () => [number, number];
readonly x25519_public_key: (a: number, b: number) => [number, number, number, number];
readonly x25519_diffie_hellman: (a: number, b: number, c: number, d: number) => [number, number, number, number];
readonly get_sealer_id: (a: number, b: number) => [number, number, number, number];
readonly seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => [number, number, number, number];
readonly unseal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => [number, number, number, number];
readonly encrypt: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly decrypt: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number, number];
readonly __wbindgen_malloc: (a: number, b: number) => number;
readonly __wbindgen_realloc: (a: number, b: number, c: number, d: number) => number;
readonly __wbindgen_exn_store: (a: number) => void;
readonly __externref_table_alloc: () => number;
readonly __wbindgen_export_4: WebAssembly.Table;
readonly __externref_table_dealloc: (a: number) => void;
readonly __wbindgen_free: (a: number, b: number, c: number) => void;
readonly __wbindgen_start: () => void;
}
export type SyncInitInput = BufferSource | WebAssembly.Module;
/**
* Instantiates the given `module`, which can either be bytes or
* a precompiled `WebAssembly.Module`.
*
* @param {{ module: SyncInitInput }} module - Passing `SyncInitInput` directly is deprecated.
*
* @returns {InitOutput}
*/
export function initSync(module: { module: SyncInitInput } | SyncInitInput): InitOutput;
/**
 * If `module_or_path` is a {RequestInfo} or {URL}, makes a request;
 * for everything else, calls `WebAssembly.instantiate` directly.
*
* @param {{ module_or_path: InitInput | Promise<InitInput> }} module_or_path - Passing `InitInput` directly is deprecated.
*
* @returns {Promise<InitOutput>}
*/
export default function __wbg_init (module_or_path?: { module_or_path: InitInput | Promise<InitInput> } | InitInput | Promise<InitInput>): Promise<InitOutput>;
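Taken together with the index.js wrapper shown earlier, these declarations suggest a usage pattern like the following minimal sketch (only functions declared in this file are used; the message text is illustrative):

```typescript
import {
  initialize,
  new_ed25519_signing_key,
  ed25519_verifying_key,
  ed25519_sign,
  ed25519_verify,
  blake3_hash_once,
  blake3_empty_state,
  blake3_update_state,
  blake3_digest_for_state,
} from "cojson-core-wasm";

// Load the base64-embedded wasm module before calling any export.
await initialize();

// Ed25519 round trip: 32-byte signing key -> 32-byte verifying key -> 64-byte signature.
const signingKey = new_ed25519_signing_key();
const verifyingKey = ed25519_verifying_key(signingKey);
const message = new TextEncoder().encode("hello jazz");
const signature = ed25519_sign(signingKey, message);
console.log(ed25519_verify(verifyingKey, message, signature)); // true

// Incremental BLAKE3 hashing matches the one-shot digest.
const state = blake3_empty_state();
blake3_update_state(state, message.subarray(0, 5));
blake3_update_state(state, message.subarray(5));
const incremental = blake3_digest_for_state(state);
const oneShot = blake3_hash_once(message);
console.log(incremental.every((byte, i) => byte === oneShot[i])); // true
```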

File diff suppressed because it is too large

View File

@@ -1 +0,0 @@
export const data: string;

File diff suppressed because one or more lines are too long

View File

@@ -1,240 +0,0 @@
use crate::error::CryptoError;
use ed25519_dalek::{Signer, SigningKey, Verifier, VerifyingKey};
use rand::rngs::OsRng;
use wasm_bindgen::prelude::*;
/// Generate a new Ed25519 signing key using secure random number generation.
/// Returns 32 bytes of raw key material suitable for use with other Ed25519 functions.
#[wasm_bindgen]
pub fn new_ed25519_signing_key() -> Box<[u8]> {
let mut rng = OsRng;
let signing_key = SigningKey::generate(&mut rng);
signing_key.to_bytes().into()
}
/// Internal function to derive an Ed25519 verifying key from a signing key.
/// Takes 32 bytes of signing key material and returns 32 bytes of verifying key material.
/// Returns CryptoError if the key length is invalid.
pub(crate) fn ed25519_verifying_key_internal(signing_key: &[u8]) -> Result<Box<[u8]>, CryptoError> {
let key_bytes: [u8; 32] = signing_key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, signing_key.len()))?;
let signing_key = SigningKey::from_bytes(&key_bytes);
Ok(signing_key.verifying_key().to_bytes().into())
}
/// WASM-exposed function to derive an Ed25519 verifying key from a signing key.
/// - `signing_key`: 32 bytes of signing key material
/// Returns 32 bytes of verifying key material or throws JsError if key is invalid.
#[wasm_bindgen]
pub fn ed25519_verifying_key(signing_key: &[u8]) -> Result<Box<[u8]>, JsError> {
ed25519_verifying_key_internal(signing_key).map_err(|e| JsError::new(&e.to_string()))
}
/// Internal function to sign a message using Ed25519.
/// Takes 32 bytes of signing key material and arbitrary message bytes.
/// Returns 64 bytes of signature material or CryptoError if key is invalid.
pub(crate) fn ed25519_sign_internal(
signing_key: &[u8],
message: &[u8],
) -> Result<[u8; 64], CryptoError> {
let key_bytes: [u8; 32] = signing_key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, signing_key.len()))?;
let signing_key = SigningKey::from_bytes(&key_bytes);
Ok(signing_key.sign(message).to_bytes())
}
/// WASM-exposed function to sign a message using Ed25519.
/// - `signing_key`: 32 bytes of signing key material
/// - `message`: Raw bytes to sign
/// Returns 64 bytes of signature material or throws JsError if signing fails.
#[wasm_bindgen]
pub fn ed25519_sign(signing_key: &[u8], message: &[u8]) -> Result<Box<[u8]>, JsError> {
Ok(ed25519_sign_internal(signing_key, message)?.into())
}
/// Internal function to verify an Ed25519 signature.
/// - `verifying_key`: 32 bytes of verifying key material
/// - `message`: Raw bytes that were signed
/// - `signature`: 64 bytes of signature material
/// Returns true if signature is valid, false otherwise, or CryptoError if key/signature format is invalid.
pub(crate) fn ed25519_verify_internal(
verifying_key: &[u8],
message: &[u8],
signature: &[u8],
) -> Result<bool, CryptoError> {
let key_bytes: [u8; 32] = verifying_key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, verifying_key.len()))?;
let verifying_key = VerifyingKey::from_bytes(&key_bytes)
.map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))?;
let sig_bytes: [u8; 64] = signature
.try_into()
.map_err(|_| CryptoError::InvalidSignatureLength)?;
let signature = ed25519_dalek::Signature::from_bytes(&sig_bytes);
Ok(verifying_key.verify(message, &signature).is_ok())
}
/// WASM-exposed function to verify an Ed25519 signature.
/// - `verifying_key`: 32 bytes of verifying key material
/// - `message`: Raw bytes that were signed
/// - `signature`: 64 bytes of signature material
/// Returns true if signature is valid, false otherwise, or throws JsError if verification fails.
#[wasm_bindgen]
pub fn ed25519_verify(
verifying_key: &[u8],
message: &[u8],
signature: &[u8],
) -> Result<bool, JsError> {
ed25519_verify_internal(verifying_key, message, signature)
.map_err(|e| JsError::new(&e.to_string()))
}
/// WASM-exposed function to validate and copy Ed25519 signing key bytes.
/// - `bytes`: 32 bytes of signing key material to validate
/// Returns the same 32 bytes if valid or throws JsError if invalid.
#[wasm_bindgen]
pub fn ed25519_signing_key_from_bytes(bytes: &[u8]) -> Result<Box<[u8]>, JsError> {
let key_bytes: [u8; 32] = bytes
.try_into()
.map_err(|_| JsError::new("Invalid signing key length"))?;
Ok(key_bytes.into())
}
/// WASM-exposed function to derive the public key from an Ed25519 signing key.
/// - `signing_key`: 32 bytes of signing key material
/// Returns 32 bytes of public key material or throws JsError if key is invalid.
#[wasm_bindgen]
pub fn ed25519_signing_key_to_public(signing_key: &[u8]) -> Result<Box<[u8]>, JsError> {
ed25519_verifying_key_internal(signing_key).map_err(|e| JsError::new(&e.to_string()))
}
/// WASM-exposed function to sign a message with an Ed25519 signing key.
/// - `signing_key`: 32 bytes of signing key material
/// - `message`: Raw bytes to sign
/// Returns 64 bytes of signature material or throws JsError if signing fails.
#[wasm_bindgen]
pub fn ed25519_signing_key_sign(signing_key: &[u8], message: &[u8]) -> Result<Box<[u8]>, JsError> {
Ok(ed25519_sign_internal(signing_key, message)?.into())
}
/// WASM-exposed function to validate and copy Ed25519 verifying key bytes.
/// - `bytes`: 32 bytes of verifying key material to validate
/// Returns the same 32 bytes if valid or throws JsError if invalid.
#[wasm_bindgen]
pub fn ed25519_verifying_key_from_bytes(bytes: &[u8]) -> Result<Box<[u8]>, JsError> {
let key_bytes: [u8; 32] = bytes
.try_into()
.map_err(|_| JsError::new("Invalid verifying key length"))?;
Ok(key_bytes.into())
}
/// WASM-exposed function to validate and copy Ed25519 signature bytes.
/// - `bytes`: 64 bytes of signature material to validate
/// Returns the same 64 bytes if valid or throws JsError if invalid.
#[wasm_bindgen]
pub fn ed25519_signature_from_bytes(bytes: &[u8]) -> Result<Box<[u8]>, JsError> {
let sig_bytes: [u8; 64] = bytes
.try_into()
.map_err(|_| JsError::new("Invalid signature length"))?;
Ok(sig_bytes.into())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ed25519_key_generation_and_signing() {
// Test key generation
let signing_key = new_ed25519_signing_key();
assert_eq!(signing_key.len(), 32, "Signing key should be 32 bytes");
// Test verifying key derivation
let verifying_key = ed25519_verifying_key_internal(&signing_key).unwrap();
assert_eq!(verifying_key.len(), 32, "Verifying key should be 32 bytes");
// Test that different signing keys produce different verifying keys
let signing_key2 = new_ed25519_signing_key();
let verifying_key2 = ed25519_verifying_key_internal(&signing_key2).unwrap();
assert_ne!(
verifying_key, verifying_key2,
"Different signing keys should produce different verifying keys"
);
// Test signing and verification
let message = b"Test message";
let signature = ed25519_sign_internal(&signing_key, message).unwrap();
assert_eq!(signature.len(), 64, "Signature should be 64 bytes");
// Test successful verification
let verification_result =
ed25519_verify_internal(&verifying_key, message, &signature).unwrap();
assert!(
verification_result,
"Valid signature should verify successfully"
);
// Test verification with wrong message
let wrong_message = b"Wrong message";
let wrong_verification =
ed25519_verify_internal(&verifying_key, wrong_message, &signature).unwrap();
assert!(
!wrong_verification,
"Signature should not verify with wrong message"
);
// Test verification with wrong key
let wrong_verification =
ed25519_verify_internal(&verifying_key2, message, &signature).unwrap();
assert!(
!wrong_verification,
"Signature should not verify with wrong key"
);
// Test verification with tampered signature
let mut tampered_signature = signature.clone();
tampered_signature[0] ^= 1;
let wrong_verification =
ed25519_verify_internal(&verifying_key, message, &tampered_signature).unwrap();
assert!(!wrong_verification, "Tampered signature should not verify");
}
#[test]
fn test_ed25519_error_cases() {
// Test invalid signing key length
let invalid_signing_key = vec![0u8; 31]; // Too short
let result = ed25519_verifying_key_internal(&invalid_signing_key);
assert!(result.is_err());
let result = ed25519_sign_internal(&invalid_signing_key, b"test");
assert!(result.is_err());
// Test invalid verifying key length
let invalid_verifying_key = vec![0u8; 31]; // Too short
let valid_signing_key = new_ed25519_signing_key();
let valid_signature = ed25519_sign_internal(&valid_signing_key, b"test").unwrap();
let result = ed25519_verify_internal(&invalid_verifying_key, b"test", &valid_signature);
assert!(result.is_err());
// Test invalid signature length
let valid_verifying_key = ed25519_verifying_key_internal(&valid_signing_key).unwrap();
let invalid_signature = vec![0u8; 63]; // Too short
let result = ed25519_verify_internal(&valid_verifying_key, b"test", &invalid_signature);
assert!(result.is_err());
// Test with too long keys
let too_long_key = vec![0u8; 33]; // Too long
let result = ed25519_verifying_key_internal(&too_long_key);
assert!(result.is_err());
let result = ed25519_sign_internal(&too_long_key, b"test");
assert!(result.is_err());
// Test with too long signature
let too_long_signature = vec![0u8; 65]; // Too long
let result = ed25519_verify_internal(&valid_verifying_key, b"test", &too_long_signature);
assert!(result.is_err());
}
}

View File

@@ -1,113 +0,0 @@
use crate::error::CryptoError;
use crate::hash::blake3::generate_nonce;
use bs58;
use wasm_bindgen::prelude::*;
/// Internal function to encrypt bytes with a key secret and nonce material.
/// Takes a base58-encoded key secret with "keySecret_z" prefix and raw nonce material.
/// Returns the encrypted bytes or a CryptoError if the key format is invalid.
pub fn encrypt_internal(
plaintext: &[u8],
key_secret: &str,
nonce_material: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
// Decode the base58 key secret (removing the "keySecret_z" prefix)
let key_secret = key_secret
.strip_prefix("keySecret_z")
.ok_or(CryptoError::InvalidPrefix("key secret", "keySecret_z"))?;
let key = bs58::decode(key_secret)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
// Generate nonce from nonce material
let nonce = generate_nonce(nonce_material);
// Encrypt using XSalsa20
Ok(super::xsalsa20::encrypt_xsalsa20_raw_internal(&key, &nonce, plaintext)?.into())
}
/// Internal function to decrypt bytes with a key secret and nonce material.
/// Takes a base58-encoded key secret with "keySecret_z" prefix and raw nonce material.
/// Returns the decrypted bytes or a CryptoError if the key format is invalid.
pub fn decrypt_internal(
ciphertext: &[u8],
key_secret: &str,
nonce_material: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
// Decode the base58 key secret (removing the "keySecret_z" prefix)
let key_secret = key_secret
.strip_prefix("keySecret_z")
.ok_or(CryptoError::InvalidPrefix("key secret", "keySecret_z"))?;
let key = bs58::decode(key_secret)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
// Generate nonce from nonce material
let nonce = generate_nonce(nonce_material);
// Decrypt using XSalsa20
Ok(super::xsalsa20::decrypt_xsalsa20_raw_internal(&key, &nonce, ciphertext)?.into())
}
/// WASM-exposed function to encrypt bytes with a key secret and nonce material.
/// - `value`: The raw bytes to encrypt
/// - `key_secret`: A base58-encoded key secret with "keySecret_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce
/// Returns the encrypted bytes or throws a JsError if encryption fails.
#[wasm_bindgen(js_name = encrypt)]
pub fn encrypt(
value: &[u8],
key_secret: &str,
nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
encrypt_internal(value, key_secret, nonce_material).map_err(|e| JsError::new(&e.to_string()))
}
/// WASM-exposed function to decrypt bytes with a key secret and nonce material.
/// - `ciphertext`: The encrypted bytes to decrypt
/// - `key_secret`: A base58-encoded key secret with "keySecret_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce (must match encryption)
/// Returns the decrypted bytes or throws a JsError if decryption fails.
#[wasm_bindgen(js_name = decrypt)]
pub fn decrypt(
ciphertext: &[u8],
key_secret: &str,
nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
Ok(decrypt_internal(ciphertext, key_secret, nonce_material)?.into())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_encrypt_decrypt() {
// Test data
let plaintext = b"Hello, World!";
let key_secret = "keySecret_z11111111111111111111111111111111"; // Example base58 encoded key
let nonce_material = b"test_nonce_material";
// Test encryption
let ciphertext = encrypt_internal(plaintext, key_secret, nonce_material).unwrap();
assert!(!ciphertext.is_empty());
// Test decryption
let decrypted = decrypt_internal(&ciphertext, key_secret, nonce_material).unwrap();
assert_eq!(&*decrypted, plaintext);
}
#[test]
fn test_invalid_key_secret() {
let plaintext = b"test";
let nonce_material = b"nonce";
// Test with invalid key secret format
let result = encrypt_internal(plaintext, "invalid_key", nonce_material);
assert!(result.is_err());
// Test with invalid base58 encoding
let result = encrypt_internal(plaintext, "keySecret_z!!!!", nonce_material);
assert!(result.is_err());
}
}
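On the JavaScript side, the WASM `encrypt`/`decrypt` exports declared earlier follow the same "keySecret_z" convention documented here. A minimal round-trip sketch (the key below is the same illustrative value as in the test, not a real secret):

```typescript
import { initialize, encrypt, decrypt } from "cojson-core-wasm";

await initialize();

// Base58-encoded symmetric key with the required "keySecret_z" prefix
// (illustrative value; real keys come from cojson's key management).
const keySecret = "keySecret_z11111111111111111111111111111111";

const plaintext = new TextEncoder().encode("Hello, World!");
const nonceMaterial = new TextEncoder().encode("test_nonce_material"); // must match on decrypt

const ciphertext = encrypt(plaintext, keySecret, nonceMaterial);
const roundTrip = decrypt(ciphertext, keySecret, nonceMaterial);
console.log(new TextDecoder().decode(roundTrip)); // "Hello, World!"
```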

View File

@@ -1,200 +0,0 @@
use crate::crypto::x25519::x25519_diffie_hellman_internal;
use crate::crypto::xsalsa20::{decrypt_xsalsa20_poly1305, encrypt_xsalsa20_poly1305};
use crate::error::CryptoError;
use crate::hash::blake3::generate_nonce;
use bs58;
use wasm_bindgen::prelude::*;
/// Internal function to seal a message using X25519 + XSalsa20-Poly1305.
/// - `message`: Raw bytes to seal
/// - `sender_secret`: Base58-encoded sender's private key with "sealerSecret_z" prefix
/// - `recipient_id`: Base58-encoded recipient's public key with "sealer_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce
/// Returns sealed bytes or CryptoError if key formats are invalid.
///
/// The sealing process:
/// 1. Decode base58 keys and validate prefixes
/// 2. Generate shared secret using X25519 key exchange
/// 3. Generate nonce from nonce material using BLAKE3
/// 4. Encrypt message using XSalsa20-Poly1305 with the shared secret
pub fn seal_internal(
message: &[u8],
sender_secret: &str,
recipient_id: &str,
nonce_material: &[u8],
) -> Result<Vec<u8>, CryptoError> {
// Decode the base58 sender secret (removing the "sealerSecret_z" prefix)
let sender_secret =
sender_secret
.strip_prefix("sealerSecret_z")
.ok_or(CryptoError::InvalidPrefix(
"sealer secret",
"sealerSecret_z",
))?;
let sender_private_key = bs58::decode(sender_secret)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
// Decode the base58 recipient ID (removing the "sealer_z" prefix)
let recipient_id = recipient_id
.strip_prefix("sealer_z")
.ok_or(CryptoError::InvalidPrefix("sealer ID", "sealer_z"))?;
let recipient_public_key = bs58::decode(recipient_id)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
let nonce = generate_nonce(nonce_material);
// Generate shared secret using X25519
let shared_secret = x25519_diffie_hellman_internal(&sender_private_key, &recipient_public_key)?;
// Encrypt message using XSalsa20-Poly1305
Ok(encrypt_xsalsa20_poly1305(&shared_secret, &nonce, message)?.into())
}
/// Internal function to unseal a message using X25519 + XSalsa20-Poly1305.
/// - `sealed_message`: The sealed bytes to decrypt
/// - `recipient_secret`: Base58-encoded recipient's private key with "sealerSecret_z" prefix
/// - `sender_id`: Base58-encoded sender's public key with "sealer_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce (must match sealing)
/// Returns unsealed bytes or CryptoError if key formats are invalid or authentication fails.
///
/// The unsealing process:
/// 1. Decode base58 keys and validate prefixes
/// 2. Generate shared secret using X25519 key exchange
/// 3. Generate nonce from nonce material using BLAKE3
/// 4. Decrypt and authenticate message using XSalsa20-Poly1305 with the shared secret
fn unseal_internal(
sealed_message: &[u8],
recipient_secret: &str,
sender_id: &str,
nonce_material: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
// Decode the base58 recipient secret (removing the "sealerSecret_z" prefix)
let recipient_secret =
recipient_secret
.strip_prefix("sealerSecret_z")
.ok_or(CryptoError::InvalidPrefix(
"sealer secret",
"sealerSecret_z",
))?;
let recipient_private_key = bs58::decode(recipient_secret)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
// Decode the base58 sender ID (removing the "sealer_z" prefix)
let sender_id = sender_id
.strip_prefix("sealer_z")
.ok_or(CryptoError::InvalidPrefix("sealer ID", "sealer_z"))?;
let sender_public_key = bs58::decode(sender_id)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
let nonce = generate_nonce(nonce_material);
// Generate shared secret using X25519
let shared_secret = x25519_diffie_hellman_internal(&recipient_private_key, &sender_public_key)?;
// Decrypt message using XSalsa20-Poly1305
Ok(decrypt_xsalsa20_poly1305(&shared_secret, &nonce, sealed_message)?.into())
}
/// WASM-exposed function for sealing a message using X25519 + XSalsa20-Poly1305.
/// Provides authenticated encryption with perfect forward secrecy.
/// - `message`: Raw bytes to seal
/// - `sender_secret`: Base58-encoded sender's private key with "sealerSecret_z" prefix
/// - `recipient_id`: Base58-encoded recipient's public key with "sealer_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce
/// Returns sealed bytes or throws JsError if sealing fails.
#[wasm_bindgen(js_name = seal)]
pub fn seal(
message: &[u8],
sender_secret: &str,
recipient_id: &str,
nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
Ok(seal_internal(message, sender_secret, recipient_id, nonce_material)?.into())
}
/// WASM-exposed function for unsealing a message using X25519 + XSalsa20-Poly1305.
/// Provides authenticated decryption using the same X25519-derived shared secret as sealing.
/// - `sealed_message`: The sealed bytes to decrypt
/// - `recipient_secret`: Base58-encoded recipient's private key with "sealerSecret_z" prefix
/// - `sender_id`: Base58-encoded sender's public key with "sealer_z" prefix
/// - `nonce_material`: Raw bytes used to generate the nonce (must match sealing)
/// Returns unsealed bytes or throws JsError if unsealing fails.
#[wasm_bindgen(js_name = unseal)]
pub fn unseal(
sealed_message: &[u8],
recipient_secret: &str,
sender_id: &str,
nonce_material: &[u8],
) -> Result<Box<[u8]>, JsError> {
Ok(unseal_internal(sealed_message, recipient_secret, sender_id, nonce_material)?.into())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::crypto::x25519::{new_x25519_private_key, x25519_public_key_internal};
#[test]
fn test_seal_unseal() {
// Generate real keys
let sender_private = new_x25519_private_key();
let sender_public = x25519_public_key_internal(&sender_private).unwrap();
// Encode keys with proper prefixes
let sender_secret = format!(
"sealerSecret_z{}",
bs58::encode(&sender_private).into_string()
);
let recipient_id = format!("sealer_z{}", bs58::encode(&sender_public).into_string());
// Test data
let message = b"Secret message";
let nonce_material = b"test_nonce_material";
// Test sealing
let sealed = seal_internal(message, &sender_secret, &recipient_id, nonce_material).unwrap();
assert!(!sealed.is_empty());
// Test unsealing (using same keys since it's a test)
let unsealed =
unseal_internal(&sealed, &sender_secret, &recipient_id, nonce_material).unwrap();
assert_eq!(&*unsealed, message);
}
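    // Added sketch (not part of the original suite): the intended two-party flow,
    // where the sender and recipient hold distinct keypairs. Sealing uses the
    // sender's secret and the recipient's ID; unsealing uses the recipient's
    // secret and the sender's ID, and both sides derive the same shared secret.
    #[test]
    fn test_seal_unseal_two_parties() {
        let sender_private = new_x25519_private_key();
        let sender_public = x25519_public_key_internal(&sender_private).unwrap();
        let recipient_private = new_x25519_private_key();
        let recipient_public = x25519_public_key_internal(&recipient_private).unwrap();
        let sender_secret = format!(
            "sealerSecret_z{}",
            bs58::encode(&sender_private).into_string()
        );
        let sender_id = format!("sealer_z{}", bs58::encode(&sender_public).into_string());
        let recipient_secret = format!(
            "sealerSecret_z{}",
            bs58::encode(&recipient_private).into_string()
        );
        let recipient_id = format!("sealer_z{}", bs58::encode(&recipient_public).into_string());
        let message = b"Secret message";
        let nonce_material = b"test_nonce_material";
        // Sender seals for the recipient...
        let sealed =
            seal_internal(message, &sender_secret, &recipient_id, nonce_material).unwrap();
        // ...and the recipient unseals with their own secret and the sender's ID.
        let unsealed =
            unseal_internal(&sealed, &recipient_secret, &sender_id, nonce_material).unwrap();
        assert_eq!(&*unsealed, message);
    }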
#[test]
fn test_invalid_keys() {
let message = b"test";
let nonce_material = b"nonce";
// Test with invalid sender secret format
let result = seal_internal(
message,
"invalid_key",
"sealer_z22222222222222222222222222222222",
nonce_material,
);
assert!(result.is_err());
// Test with invalid recipient ID format
let result = seal_internal(
message,
"sealerSecret_z11111111111111111111111111111111",
"invalid_key",
nonce_material,
);
assert!(result.is_err());
// Test with invalid base58 encoding
let result = seal_internal(
message,
"sealerSecret_z!!!!",
"sealer_z22222222222222222222222222222222",
nonce_material,
);
assert!(result.is_err());
}
}

View File

@@ -1,184 +0,0 @@
use crate::crypto::ed25519::{
ed25519_sign_internal, ed25519_verify_internal, ed25519_verifying_key_internal,
};
use crate::error::CryptoError;
use bs58;
use wasm_bindgen::prelude::*;
/// Internal function to sign a message using Ed25519.
/// - `message`: Raw bytes to sign
/// - `secret`: Base58-encoded signing key with "signerSecret_z" prefix
/// Returns base58-encoded signature with "signature_z" prefix, or CryptoError if the secret format is invalid.
pub fn sign_internal(message: &[u8], secret: &str) -> Result<String, CryptoError> {
let secret_bytes = bs58::decode(secret.strip_prefix("signerSecret_z").ok_or(
CryptoError::InvalidPrefix("signer secret", "signerSecret_z"),
)?)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
let signature = ed25519_sign_internal(&secret_bytes, message)
.map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))?;
Ok(format!(
"signature_z{}",
bs58::encode(signature).into_string()
))
}
/// Internal function to verify an Ed25519 signature.
/// - `signature`: Base58-encoded signature with "signature_z" prefix
/// - `message`: Raw bytes that were signed
/// - `id`: Base58-encoded verifying key with "signer_z" prefix
/// Returns true if the signature is valid, false otherwise, or CryptoError if the formats are invalid.
pub fn verify_internal(signature: &str, message: &[u8], id: &str) -> Result<bool, CryptoError> {
let signature_bytes = bs58::decode(
signature
.strip_prefix("signature_z")
.ok_or(CryptoError::InvalidPrefix("signature_z", "signature"))?,
)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
let verifying_key = bs58::decode(
id.strip_prefix("signer_z")
.ok_or(CryptoError::InvalidPrefix("signer_z", "signer ID"))?,
)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
ed25519_verify_internal(&verifying_key, message, &signature_bytes)
.map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))
}
/// Internal function to derive a signer ID from a signing key.
/// - `secret`: Base58-encoded signing key with "signerSecret_z" prefix
/// Returns base58-encoded verifying key with "signer_z" prefix, or CryptoError if the secret format is invalid.
pub fn get_signer_id_internal(secret: &str) -> Result<String, CryptoError> {
let secret_bytes = bs58::decode(secret.strip_prefix("signerSecret_z").ok_or(
CryptoError::InvalidPrefix("signerSecret_z", "signer secret"),
)?)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
let verifying_key = ed25519_verifying_key_internal(&secret_bytes)
.map_err(|e| CryptoError::InvalidVerifyingKey(e.to_string()))?;
Ok(format!(
"signer_z{}",
bs58::encode(verifying_key).into_string()
))
}
/// WASM-exposed function to sign a message using Ed25519.
/// - `message`: Raw bytes to sign
/// - `secret`: UTF-8 bytes of the base58-encoded signing key with "signerSecret_z" prefix
/// Returns base58-encoded signature with "signature_z" prefix or throws JsError if signing fails.
#[wasm_bindgen(js_name = sign)]
pub fn sign(message: &[u8], secret: &[u8]) -> Result<String, JsError> {
let secret_str = std::str::from_utf8(secret)
.map_err(|e| JsError::new(&format!("Invalid UTF-8 in secret: {:?}", e)))?;
sign_internal(message, secret_str).map_err(|e| JsError::new(&e.to_string()))
}
/// WASM-exposed function to verify an Ed25519 signature.
/// - `signature`: UTF-8 bytes of the base58-encoded signature with "signature_z" prefix
/// - `message`: Raw bytes that were signed
/// - `id`: UTF-8 bytes of the base58-encoded verifying key with "signer_z" prefix
/// Returns true if signature is valid, false otherwise, or throws JsError if verification fails.
#[wasm_bindgen(js_name = verify)]
pub fn verify(signature: &[u8], message: &[u8], id: &[u8]) -> Result<bool, JsError> {
let signature_str = std::str::from_utf8(signature)
.map_err(|e| JsError::new(&format!("Invalid UTF-8 in signature: {:?}", e)))?;
let id_str = std::str::from_utf8(id)
.map_err(|e| JsError::new(&format!("Invalid UTF-8 in id: {:?}", e)))?;
verify_internal(signature_str, message, id_str).map_err(|e| JsError::new(&e.to_string()))
}
/// WASM-exposed function to derive a signer ID from a signing key.
/// - `secret`: UTF-8 bytes of the base58-encoded signing key with "signerSecret_z" prefix
/// Returns base58-encoded verifying key with "signer_z" prefix or throws JsError if derivation fails.
#[wasm_bindgen(js_name = get_signer_id)]
pub fn get_signer_id(secret: &[u8]) -> Result<String, JsError> {
let secret_str = std::str::from_utf8(secret)
.map_err(|e| JsError::new(&format!("Invalid UTF-8 in secret: {:?}", e)))?;
get_signer_id_internal(secret_str).map_err(|e| JsError::new(&e.to_string()))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::crypto::ed25519::new_ed25519_signing_key;
#[test]
fn test_sign_and_verify() {
let message = b"hello world";
// Create a test signing key
let signing_key = new_ed25519_signing_key();
let secret = format!("signerSecret_z{}", bs58::encode(&signing_key).into_string());
// Sign the message
let signature = sign_internal(message, &secret).unwrap();
// Get the public key for verification
let secret_bytes = bs58::decode(secret.strip_prefix("signerSecret_z").unwrap())
.into_vec()
.unwrap();
let verifying_key = ed25519_verifying_key_internal(&secret_bytes).unwrap();
let signer_id = format!("signer_z{}", bs58::encode(&verifying_key).into_string());
// Verify the signature
assert!(verify_internal(&signature, message, &signer_id).unwrap());
}
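    // Added sketch: the same round trip as above, but deriving the signer ID via
    // get_signer_id_internal instead of decoding the secret and deriving the
    // verifying key by hand.
    #[test]
    fn test_sign_and_verify_with_derived_id() {
        let message = b"hello world";
        let signing_key = new_ed25519_signing_key();
        let secret = format!("signerSecret_z{}", bs58::encode(&signing_key).into_string());
        let signature = sign_internal(message, &secret).unwrap();
        let signer_id = get_signer_id_internal(&secret).unwrap();
        assert!(verify_internal(&signature, message, &signer_id).unwrap());
    }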
#[test]
fn test_invalid_inputs() {
let message = b"hello world";
// Test invalid base58 in secret
let result = sign_internal(message, "signerSecret_z!!!invalid!!!");
assert!(matches!(result, Err(CryptoError::Base58Error(_))));
// Test invalid signature format
let result = verify_internal("not_a_signature", message, "signer_z123");
assert!(matches!(
result,
Err(CryptoError::InvalidPrefix("signature_z", "signature"))
));
// Test invalid signer ID format
let result = verify_internal("signature_z123", message, "not_a_signer");
assert!(matches!(
result,
Err(CryptoError::InvalidPrefix("signer_z", "signer ID"))
));
}
#[test]
fn test_get_signer_id() {
// Create a test signing key
let signing_key = new_ed25519_signing_key();
let secret = format!("signerSecret_z{}", bs58::encode(&signing_key).into_string());
// Get signer ID
let signer_id = get_signer_id_internal(&secret).unwrap();
assert!(signer_id.starts_with("signer_z"));
// Test that same secret produces same ID
let signer_id2 = get_signer_id_internal(&secret).unwrap();
assert_eq!(signer_id, signer_id2);
// Test invalid secret format
let result = get_signer_id_internal("invalid_secret");
assert!(matches!(
result,
Err(CryptoError::InvalidPrefix(
"signerSecret_z",
"signer secret"
))
));
// Test invalid base58
let result = get_signer_id_internal("signerSecret_z!!!invalid!!!");
assert!(matches!(result, Err(CryptoError::Base58Error(_))));
}
}

View File

@@ -1,168 +0,0 @@
use crate::error::CryptoError;
use bs58;
use wasm_bindgen::prelude::*;
use x25519_dalek::{PublicKey, StaticSecret};
/// Generate a new X25519 private key using secure random number generation.
/// Returns 32 bytes of raw key material suitable for use with other X25519 functions.
/// This key can be reused for multiple Diffie-Hellman exchanges.
#[wasm_bindgen]
pub fn new_x25519_private_key() -> Vec<u8> {
let secret = StaticSecret::random();
secret.to_bytes().to_vec()
}
/// Internal function to derive an X25519 public key from a private key.
/// Takes 32 bytes of private key material and returns 32 bytes of public key material.
/// Returns CryptoError if the key length is invalid.
pub(crate) fn x25519_public_key_internal(private_key: &[u8]) -> Result<[u8; 32], CryptoError> {
let bytes: [u8; 32] = private_key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, private_key.len()))?;
let secret = StaticSecret::from(bytes);
Ok(PublicKey::from(&secret).to_bytes())
}
/// WASM-exposed function to derive an X25519 public key from a private key.
/// - `private_key`: 32 bytes of private key material
/// Returns 32 bytes of public key material or throws JsError if key is invalid.
#[wasm_bindgen]
pub fn x25519_public_key(private_key: &[u8]) -> Result<Vec<u8>, JsError> {
Ok(x25519_public_key_internal(private_key)?.to_vec())
}
/// Internal function to perform X25519 Diffie-Hellman key exchange.
/// Takes 32 bytes each of private and public key material.
/// Returns 32 bytes of shared secret material or CryptoError if key lengths are invalid.
pub(crate) fn x25519_diffie_hellman_internal(
private_key: &[u8],
public_key: &[u8],
) -> Result<[u8; 32], CryptoError> {
let private_bytes: [u8; 32] = private_key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, private_key.len()))?;
let public_bytes: [u8; 32] = public_key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, public_key.len()))?;
let secret = StaticSecret::from(private_bytes);
let public = PublicKey::from(public_bytes);
Ok(secret.diffie_hellman(&public).to_bytes())
}
/// WASM-exposed function to perform X25519 Diffie-Hellman key exchange.
/// - `private_key`: 32 bytes of private key material
/// - `public_key`: 32 bytes of public key material
/// Returns 32 bytes of shared secret material or throws JsError if key exchange fails.
#[wasm_bindgen]
pub fn x25519_diffie_hellman(private_key: &[u8], public_key: &[u8]) -> Result<Vec<u8>, JsError> {
Ok(x25519_diffie_hellman_internal(private_key, public_key)?.to_vec())
}
/// Internal function to derive a sealer ID from a sealer secret.
/// Takes a base58-encoded sealer secret with "sealerSecret_z" prefix.
/// Returns a base58-encoded sealer ID with "sealer_z" prefix, or CryptoError if the format is invalid.
pub fn get_sealer_id_internal(secret: &str) -> Result<String, CryptoError> {
let private_bytes = bs58::decode(secret.strip_prefix("sealerSecret_z").ok_or(
CryptoError::InvalidPrefix("sealerSecret_z", "sealer secret"),
)?)
.into_vec()
.map_err(|e| CryptoError::Base58Error(e.to_string()))?;
let public_bytes = x25519_public_key_internal(&private_bytes)
.map_err(|e| CryptoError::InvalidPublicKey(e.to_string()))?;
Ok(format!(
"sealer_z{}",
bs58::encode(public_bytes).into_string()
))
}
/// WASM-exposed function to derive a sealer ID from a sealer secret.
/// - `secret`: UTF-8 bytes of the base58-encoded sealer secret with "sealerSecret_z" prefix
/// Returns a base58-encoded sealer ID with "sealer_z" prefix or throws JsError if derivation fails.
#[wasm_bindgen]
pub fn get_sealer_id(secret: &[u8]) -> Result<String, JsError> {
let secret_str = std::str::from_utf8(secret)
.map_err(|e| JsError::new(&format!("Invalid UTF-8 in secret: {:?}", e)))?;
get_sealer_id_internal(secret_str).map_err(|e| JsError::new(&e.to_string()))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_x25519_key_generation() {
// Test that we get the correct length keys
let private_key = new_x25519_private_key();
assert_eq!(private_key.len(), 32);
// Test that public key generation works and produces correct length
let public_key = x25519_public_key_internal(&private_key).unwrap();
assert_eq!(public_key.len(), 32);
// Test that different private keys produce different public keys
let private_key2 = new_x25519_private_key();
let public_key2 = x25519_public_key_internal(&private_key2).unwrap();
assert_ne!(public_key, public_key2);
}
#[test]
fn test_x25519_key_exchange() {
// Generate sender's keypair
let sender_private = new_x25519_private_key();
let sender_public = x25519_public_key_internal(&sender_private).unwrap();
// Generate recipient's keypair
let recipient_private = new_x25519_private_key();
let recipient_public = x25519_public_key_internal(&recipient_private).unwrap();
// Test properties we expect from the shared secret
let shared_secret1 =
x25519_diffie_hellman_internal(&sender_private, &recipient_public).unwrap();
let shared_secret2 =
x25519_diffie_hellman_internal(&recipient_private, &sender_public).unwrap();
// Both sides should arrive at the same shared secret
assert_eq!(shared_secret1, shared_secret2);
// Shared secret should be 32 bytes
assert_eq!(shared_secret1.len(), 32);
// Different recipient should produce different shared secret
let other_recipient_private = new_x25519_private_key();
let other_recipient_public = x25519_public_key_internal(&other_recipient_private).unwrap();
let different_shared_secret =
x25519_diffie_hellman_internal(&sender_private, &other_recipient_public).unwrap();
assert_ne!(shared_secret1, different_shared_secret);
}
#[test]
fn test_get_sealer_id() {
// Create a test private key
let private_key = new_x25519_private_key();
let secret = format!("sealerSecret_z{}", bs58::encode(&private_key).into_string());
// Get sealer ID
let sealer_id = get_sealer_id_internal(&secret).unwrap();
assert!(sealer_id.starts_with("sealer_z"));
// Test that same secret produces same ID
let sealer_id2 = get_sealer_id_internal(&secret).unwrap();
assert_eq!(sealer_id, sealer_id2);
// Test invalid secret format
let result = get_sealer_id_internal("invalid_secret");
assert!(matches!(
result,
Err(CryptoError::InvalidPrefix(
"sealerSecret_z",
"sealer secret"
))
));
// Test invalid base58
let result = get_sealer_id_internal("sealerSecret_z!!!invalid!!!");
assert!(matches!(result, Err(CryptoError::Base58Error(_))));
}
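    // Added sketch making the encoding explicit: the sealer ID is just the X25519
    // public key, base58-encoded and prefixed with "sealer_z".
    #[test]
    fn test_sealer_id_matches_public_key_encoding() {
        let private_key = new_x25519_private_key();
        let public_key = x25519_public_key_internal(&private_key).unwrap();
        let secret = format!("sealerSecret_z{}", bs58::encode(&private_key).into_string());
        let expected = format!("sealer_z{}", bs58::encode(&public_key).into_string());
        assert_eq!(get_sealer_id_internal(&secret).unwrap(), expected);
    }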
}

View File

@@ -1,256 +0,0 @@
use crate::error::CryptoError;
use crate::hash::blake3::generate_nonce;
use crypto_secretbox::{
aead::{Aead, KeyInit},
XSalsa20Poly1305,
};
use salsa20::cipher::{KeyIvInit, StreamCipher};
use salsa20::XSalsa20;
use wasm_bindgen::prelude::*;
/// WASM-exposed function for XSalsa20 encryption without authentication.
/// - `key`: 32-byte key for encryption
/// - `nonce_material`: Raw bytes used to generate a 24-byte nonce via BLAKE3
/// - `plaintext`: Raw bytes to encrypt
/// Returns the encrypted bytes or throws a JsError if encryption fails.
/// Note: This function does not provide authentication. Use encrypt_xsalsa20_poly1305 for authenticated encryption.
#[wasm_bindgen]
pub fn encrypt_xsalsa20(
key: &[u8],
nonce_material: &[u8],
plaintext: &[u8],
) -> Result<Box<[u8]>, JsError> {
let nonce = generate_nonce(nonce_material);
Ok(encrypt_xsalsa20_raw_internal(key, &nonce, plaintext)?.into())
}
/// WASM-exposed function for XSalsa20 decryption without authentication.
/// - `key`: 32-byte key for decryption (must match encryption key)
/// - `nonce_material`: Raw bytes used to generate a 24-byte nonce (must match encryption)
/// - `ciphertext`: Encrypted bytes to decrypt
/// Returns the decrypted bytes or throws a JsError if decryption fails.
/// Note: This function does not provide authentication. Use decrypt_xsalsa20_poly1305 for authenticated decryption.
#[wasm_bindgen]
pub fn decrypt_xsalsa20(
key: &[u8],
nonce_material: &[u8],
ciphertext: &[u8],
) -> Result<Box<[u8]>, JsError> {
let nonce = generate_nonce(nonce_material);
Ok(decrypt_xsalsa20_raw_internal(key, &nonce, ciphertext)?.into())
}
/// Internal function for raw XSalsa20 encryption without nonce generation.
/// Takes a 32-byte key and 24-byte nonce directly.
/// Returns encrypted bytes or CryptoError if key/nonce lengths are invalid.
pub fn encrypt_xsalsa20_raw_internal(
key: &[u8],
nonce: &[u8],
plaintext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
// Key must be 32 bytes
let key_bytes: [u8; 32] = key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
// Nonce must be 24 bytes
let nonce_bytes: [u8; 24] = nonce
.try_into()
.map_err(|_| CryptoError::InvalidNonceLength)?;
// Create cipher instance and encrypt
let mut cipher = XSalsa20::new_from_slices(&key_bytes, &nonce_bytes)
.map_err(|_| CryptoError::CipherError)?;
let mut buffer = plaintext.to_vec();
cipher.apply_keystream(&mut buffer);
Ok(buffer.into_boxed_slice())
}
/// Internal function for raw XSalsa20 decryption without nonce generation.
/// Takes a 32-byte key and 24-byte nonce directly.
/// Returns decrypted bytes or CryptoError if key/nonce lengths are invalid.
pub fn decrypt_xsalsa20_raw_internal(
key: &[u8],
nonce: &[u8],
ciphertext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
// Key must be 32 bytes
let key_bytes: [u8; 32] = key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
// Nonce must be 24 bytes
let nonce_bytes: [u8; 24] = nonce
.try_into()
.map_err(|_| CryptoError::InvalidNonceLength)?;
// Create cipher instance and decrypt (XSalsa20 is symmetric)
let mut cipher = XSalsa20::new_from_slices(&key_bytes, &nonce_bytes)
.map_err(|_| CryptoError::CipherError)?;
let mut buffer = ciphertext.to_vec();
cipher.apply_keystream(&mut buffer);
Ok(buffer.into_boxed_slice())
}
/// XSalsa20-Poly1305 authenticated encryption.
/// Takes a 32-byte key and a 24-byte nonce; returns the authenticated ciphertext
/// (plaintext length plus a 16-byte Poly1305 tag) or CryptoError if the lengths are invalid.
pub fn encrypt_xsalsa20_poly1305(
key: &[u8],
nonce: &[u8],
plaintext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
// Key must be 32 bytes
let key_bytes: [u8; 32] = key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
// Nonce must be 24 bytes
let nonce_bytes: [u8; 24] = nonce
.try_into()
.map_err(|_| CryptoError::InvalidNonceLength)?;
// Create cipher instance
let cipher = XSalsa20Poly1305::new(&key_bytes.into());
// Encrypt the plaintext
cipher
.encrypt(&nonce_bytes.into(), plaintext)
.map(|v| v.into_boxed_slice())
.map_err(|_| CryptoError::WrongTag)
}
/// XSalsa20-Poly1305 authenticated decryption.
/// Takes a 32-byte key and a 24-byte nonce; returns the plaintext or CryptoError
/// if the lengths are invalid or the authentication tag does not verify.
pub fn decrypt_xsalsa20_poly1305(
key: &[u8],
nonce: &[u8],
ciphertext: &[u8],
) -> Result<Box<[u8]>, CryptoError> {
// Key must be 32 bytes
let key_bytes: [u8; 32] = key
.try_into()
.map_err(|_| CryptoError::InvalidKeyLength(32, key.len()))?;
// Nonce must be 24 bytes
let nonce_bytes: [u8; 24] = nonce
.try_into()
.map_err(|_| CryptoError::InvalidNonceLength)?;
// Create cipher instance
let cipher = XSalsa20Poly1305::new(&key_bytes.into());
// Decrypt the ciphertext
cipher
.decrypt(&nonce_bytes.into(), ciphertext)
.map(|v| v.into_boxed_slice())
.map_err(|_| CryptoError::WrongTag)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_xsalsa20() {
// Test vectors
let key = [0u8; 32]; // All zeros key
let nonce = [0u8; 24]; // All zeros nonce
let plaintext = b"Hello, World!";
// Test encryption
let ciphertext = encrypt_xsalsa20_raw_internal(&key, &nonce, plaintext).unwrap();
assert_ne!(&*ciphertext, plaintext); // Ciphertext should be different from plaintext
// Test decryption
let decrypted = decrypt_xsalsa20_raw_internal(&key, &nonce, &ciphertext).unwrap();
assert_eq!(&*decrypted, plaintext);
// Test that different nonce produces different ciphertext
let nonce2 = [1u8; 24];
let ciphertext2 = encrypt_xsalsa20_raw_internal(&key, &nonce2, plaintext).unwrap();
assert_ne!(ciphertext, ciphertext2);
// Test that different key produces different ciphertext
let key2 = [1u8; 32];
let ciphertext3 = encrypt_xsalsa20_raw_internal(&key2, &nonce, plaintext).unwrap();
assert_ne!(ciphertext, ciphertext3);
// Test invalid key length
assert!(encrypt_xsalsa20_raw_internal(&key[..31], &nonce, plaintext).is_err());
assert!(decrypt_xsalsa20_raw_internal(&key[..31], &nonce, &ciphertext).is_err());
// Test invalid nonce length
assert!(encrypt_xsalsa20_raw_internal(&key, &nonce[..23], plaintext).is_err());
assert!(decrypt_xsalsa20_raw_internal(&key, &nonce[..23], &ciphertext).is_err());
}
#[test]
fn test_xsalsa20_error_handling() {
let key = [0u8; 32];
let nonce = [0u8; 24];
let plaintext = b"test message";
// Test encryption with invalid key length
let invalid_key = vec![0u8; 31]; // Too short
let result = encrypt_xsalsa20_raw_internal(&invalid_key, &nonce, plaintext);
assert!(result.is_err());
// Test with too long key
let too_long_key = vec![0u8; 33]; // Too long
let result = encrypt_xsalsa20_raw_internal(&too_long_key, &nonce, plaintext);
assert!(result.is_err());
// Test decryption with invalid key length
let ciphertext = encrypt_xsalsa20_raw_internal(&key, &nonce, plaintext).unwrap();
let result = decrypt_xsalsa20_raw_internal(&invalid_key, &nonce, &ciphertext);
assert!(result.is_err());
// Test decryption with too long key
let result = decrypt_xsalsa20_raw_internal(&too_long_key, &nonce, &ciphertext);
assert!(result.is_err());
// Test with invalid nonce length
let invalid_nonce = vec![0u8; 23]; // Too short
let result = encrypt_xsalsa20_raw_internal(&key, &invalid_nonce, plaintext);
assert!(result.is_err());
let result = decrypt_xsalsa20_raw_internal(&key, &invalid_nonce, &ciphertext);
assert!(result.is_err());
// Test with too long nonce
let too_long_nonce = vec![0u8; 25]; // Too long
let result = encrypt_xsalsa20_raw_internal(&key, &too_long_nonce, plaintext);
assert!(result.is_err());
let result = decrypt_xsalsa20_raw_internal(&key, &too_long_nonce, &ciphertext);
assert!(result.is_err());
}
#[test]
fn test_xsalsa20_poly1305() {
let key = [0u8; 32]; // All zeros key
let nonce = [0u8; 24]; // All zeros nonce
let plaintext = b"Hello, World!";
// Test encryption
let ciphertext = encrypt_xsalsa20_poly1305(&key, &nonce, plaintext).unwrap();
assert!(ciphertext.len() > plaintext.len()); // Should include authentication tag
// Test decryption
let decrypted = decrypt_xsalsa20_poly1305(&key, &nonce, &ciphertext).unwrap();
assert_eq!(&*decrypted, plaintext);
// Test that different nonce produces different ciphertext
let nonce2 = [1u8; 24];
let ciphertext2 = encrypt_xsalsa20_poly1305(&key, &nonce2, plaintext).unwrap();
assert_ne!(ciphertext, ciphertext2);
// Test that different key produces different ciphertext
let key2 = [1u8; 32];
let ciphertext3 = encrypt_xsalsa20_poly1305(&key2, &nonce, plaintext).unwrap();
assert_ne!(ciphertext, ciphertext3);
// Test that decryption fails with wrong key
assert!(decrypt_xsalsa20_poly1305(&key2, &nonce, &ciphertext).is_err());
// Test that decryption fails with wrong nonce
assert!(decrypt_xsalsa20_poly1305(&key, &nonce2, &ciphertext).is_err());
// Test that decryption fails with tampered ciphertext
let mut tampered = ciphertext.clone();
tampered[0] ^= 1;
assert!(decrypt_xsalsa20_poly1305(&key, &nonce, &tampered).is_err());
}
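    // Added sketch: the Poly1305 tag accounts for exactly 16 bytes of overhead,
    // and any truncation of the ciphertext must fail authentication.
    #[test]
    fn test_xsalsa20_poly1305_tag_overhead() {
        let key = [7u8; 32];
        let nonce = [3u8; 24];
        let plaintext = b"tag overhead check";
        let ciphertext = encrypt_xsalsa20_poly1305(&key, &nonce, plaintext).unwrap();
        assert_eq!(ciphertext.len(), plaintext.len() + 16);
        let truncated = &ciphertext[..ciphertext.len() - 1];
        assert!(decrypt_xsalsa20_poly1305(&key, &nonce, truncated).is_err());
    }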
}

View File

@@ -1,43 +0,0 @@
use std::fmt;
#[derive(Debug)]
pub enum CryptoError {
InvalidKeyLength(usize, usize),
InvalidNonceLength,
InvalidSealerSecretFormat,
InvalidSignatureLength,
InvalidVerifyingKey(String),
InvalidPublicKey(String),
WrongTag,
CipherError,
InvalidPrefix(&'static str, &'static str),
Base58Error(String),
}
impl fmt::Display for CryptoError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
CryptoError::InvalidKeyLength(expected, actual) => {
write!(f, "Invalid key length (expected {expected}, got {actual})")
}
CryptoError::InvalidNonceLength => write!(f, "Invalid nonce length"),
CryptoError::InvalidSealerSecretFormat => {
write!(
f,
"Invalid sealer secret format: must start with 'sealerSecret_z'"
)
}
CryptoError::InvalidSignatureLength => write!(f, "Invalid signature length"),
CryptoError::InvalidVerifyingKey(e) => write!(f, "Invalid verifying key: {}", e),
CryptoError::InvalidPublicKey(e) => write!(f, "Invalid public key: {}", e),
CryptoError::WrongTag => write!(f, "Wrong tag"),
CryptoError::CipherError => write!(f, "Failed to create cipher"),
CryptoError::InvalidPrefix(prefix, field) => {
write!(f, "Invalid {} format: must start with '{}'", field, prefix)
}
CryptoError::Base58Error(e) => write!(f, "Invalid base58: {}", e),
}
}
}
impl std::error::Error for CryptoError {}

View File

@@ -1,218 +0,0 @@
use wasm_bindgen::prelude::*;
/// Generate a 24-byte nonce from input material using BLAKE3.
/// - `nonce_material`: Raw bytes to derive the nonce from
/// Returns 24 bytes suitable for use as a nonce in cryptographic operations.
/// This function is deterministic - the same input will produce the same nonce.
#[wasm_bindgen]
pub fn generate_nonce(nonce_material: &[u8]) -> Box<[u8]> {
let mut hasher = blake3::Hasher::new();
hasher.update(nonce_material);
hasher.finalize().as_bytes()[..24].into()
}
/// Hash data once using BLAKE3.
/// - `data`: Raw bytes to hash
/// Returns 32 bytes of hash output.
/// This is the simplest way to compute a BLAKE3 hash of a single piece of data.
#[wasm_bindgen]
pub fn blake3_hash_once(data: &[u8]) -> Box<[u8]> {
let mut hasher = blake3::Hasher::new();
hasher.update(data);
hasher.finalize().as_bytes().to_vec().into_boxed_slice()
}
/// Hash data once using BLAKE3 with a context prefix.
/// - `data`: Raw bytes to hash
/// - `context`: Context bytes to prefix to the data
/// Returns 32 bytes of hash output.
/// This is useful for domain separation - the same data hashed with different contexts will produce different outputs.
#[wasm_bindgen]
pub fn blake3_hash_once_with_context(data: &[u8], context: &[u8]) -> Box<[u8]> {
let mut hasher = blake3::Hasher::new();
hasher.update(context);
hasher.update(data);
hasher.finalize().as_bytes().to_vec().into_boxed_slice()
}
#[wasm_bindgen]
pub struct Blake3Hasher(blake3::Hasher);
#[wasm_bindgen]
impl Blake3Hasher {
#[wasm_bindgen(constructor)]
pub fn new() -> Self {
Blake3Hasher(blake3::Hasher::new())
}
pub fn update(&mut self, data: &[u8]) {
self.0.update(data);
}
pub fn finalize(&self) -> Box<[u8]> {
self.0.finalize().as_bytes().to_vec().into_boxed_slice()
}
pub fn clone(&self) -> Self {
// The blake3::Hasher type implements Clone
Blake3Hasher(self.0.clone())
}
}
/// Get an empty BLAKE3 state for incremental hashing.
/// Returns a new Blake3Hasher instance for incremental hashing.
#[wasm_bindgen]
pub fn blake3_empty_state() -> Blake3Hasher {
Blake3Hasher::new()
}
/// Update a BLAKE3 state with new data for incremental hashing.
/// - `state`: Current Blake3Hasher instance
/// - `data`: New data to incorporate into the hash
/// Returns the updated Blake3Hasher.
#[wasm_bindgen]
pub fn blake3_update_state(state: &mut Blake3Hasher, data: &[u8]) {
state.update(data);
}
/// Get the final hash from a BLAKE3 state.
/// - `state`: The Blake3Hasher to finalize
/// Returns 32 bytes of hash output.
/// This finalizes an incremental hashing operation.
#[wasm_bindgen]
pub fn blake3_digest_for_state(state: Blake3Hasher) -> Box<[u8]> {
state.finalize()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_nonce_generation() {
let input = b"test input";
let nonce = generate_nonce(input);
assert_eq!(nonce.len(), 24);
// Same input should produce same nonce
let nonce2 = generate_nonce(input);
assert_eq!(nonce, nonce2);
// Different input should produce different nonce
let nonce3 = generate_nonce(b"different input");
assert_ne!(nonce, nonce3);
}
#[test]
fn test_blake3_hash_once() {
let input = b"test input";
let hash = blake3_hash_once(input);
// BLAKE3 produces 32-byte hashes
assert_eq!(hash.len(), 32);
// Same input should produce same hash
let hash2 = blake3_hash_once(input);
assert_eq!(hash, hash2);
// Different input should produce different hash
let hash3 = blake3_hash_once(b"different input");
assert_ne!(hash, hash3);
}
#[test]
fn test_blake3_hash_once_with_context() {
let input = b"test input";
let context = b"test context";
let hash = blake3_hash_once_with_context(input, context);
// BLAKE3 produces 32-byte hashes
assert_eq!(hash.len(), 32);
// Same input and context should produce same hash
let hash2 = blake3_hash_once_with_context(input, context);
assert_eq!(hash, hash2);
// Different input should produce different hash
let hash3 = blake3_hash_once_with_context(b"different input", context);
assert_ne!(hash, hash3);
// Different context should produce different hash
let hash4 = blake3_hash_once_with_context(input, b"different context");
assert_ne!(hash, hash4);
// Hash with context should be different from hash without context
let hash_no_context = blake3_hash_once(input);
assert_ne!(hash, hash_no_context);
}
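    // Added sketch: because the context is fed through the same streaming hasher,
    // hashing with a context is equivalent to hashing context || data in one shot,
    // which is the property the domain-separation helper relies on.
    #[test]
    fn test_context_hash_equals_concatenation() {
        let data = b"payload";
        let context = b"ctx";
        let mut concatenated = Vec::new();
        concatenated.extend_from_slice(context);
        concatenated.extend_from_slice(data);
        assert_eq!(
            blake3_hash_once_with_context(data, context),
            blake3_hash_once(&concatenated)
        );
    }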
#[test]
fn test_blake3_incremental() {
// Initial state
let mut state = blake3_empty_state();
// First update with [1,2,3,4,5]
let data1 = &[1u8, 2, 3, 4, 5];
blake3_update_state(&mut state, data1);
// Check that this matches a direct hash
let direct_hash = blake3_hash_once(data1);
let state_hash = state.finalize();
assert_eq!(
state_hash, direct_hash,
"First update should match direct hash"
);
// Create new state for second test
let mut state = blake3_empty_state();
blake3_update_state(&mut state, data1);
// Verify the exact expected hash from the TypeScript test for the first update
let expected_first_hash = [
2, 79, 103, 192, 66, 90, 61, 192, 47, 186, 245, 140, 185, 61, 229, 19, 46, 61, 117,
197, 25, 250, 160, 186, 218, 33, 73, 29, 136, 201, 112, 87,
]
.to_vec()
.into_boxed_slice();
assert_eq!(
state.finalize(),
expected_first_hash,
"First update should match expected hash"
);
// Test with two updates
let mut state = blake3_empty_state();
let data1 = &[1u8, 2, 3, 4, 5];
let data2 = &[6u8, 7, 8, 9, 10];
blake3_update_state(&mut state, data1);
blake3_update_state(&mut state, data2);
// Compare with a single hash of all data
let mut all_data = Vec::new();
all_data.extend_from_slice(data1);
all_data.extend_from_slice(data2);
let direct_hash_all = blake3_hash_once(&all_data);
assert_eq!(
state.finalize(),
direct_hash_all,
"Final state should match direct hash of all data"
);
// Test final hash matches expected value
let mut state = blake3_empty_state();
blake3_update_state(&mut state, data1);
blake3_update_state(&mut state, data2);
let expected_final_hash = [
165, 131, 141, 69, 2, 69, 39, 236, 196, 244, 180, 213, 147, 124, 222, 39, 68, 223, 54,
176, 242, 97, 200, 101, 204, 79, 21, 233, 56, 51, 1, 199,
]
.to_vec()
.into_boxed_slice();
assert_eq!(
state.finalize(),
expected_final_hash,
"Final state should match expected hash"
);
}
}

View File

@@ -1,165 +0,0 @@
use cojson_core::{
CoID, CoJsonCoreError, KeyID, KeySecret, SessionID, SessionLogInternal, Signature, SignerID, SignerSecret, TransactionMode
};
use serde_json::value::RawValue;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use wasm_bindgen::prelude::*;
mod error;
pub use error::CryptoError;
pub mod hash {
pub mod blake3;
pub use blake3::*;
}
pub mod crypto {
pub mod ed25519;
pub mod encrypt;
pub mod seal;
pub mod sign;
pub mod x25519;
pub mod xsalsa20;
pub use ed25519::*;
pub use encrypt::*;
pub use seal::*;
pub use sign::*;
pub use x25519::*;
pub use xsalsa20::*;
}
#[derive(Error, Debug)]
pub enum CojsonCoreWasmError {
#[error(transparent)]
CoJson(#[from] CoJsonCoreError),
#[error(transparent)]
Serde(#[from] serde_json::Error),
#[error(transparent)]
SerdeWasmBindgen(#[from] serde_wasm_bindgen::Error),
#[error("JsValue Error: {0:?}")]
Js(JsValue),
}
impl From<CojsonCoreWasmError> for JsValue {
fn from(err: CojsonCoreWasmError) -> Self {
JsValue::from_str(&err.to_string())
}
}
#[wasm_bindgen]
#[derive(Clone)]
pub struct SessionLog {
internal: SessionLogInternal,
}
#[derive(Serialize, Deserialize)]
struct PrivateTransactionResult {
signature: String,
encrypted_changes: String,
}
#[wasm_bindgen]
impl SessionLog {
#[wasm_bindgen(constructor)]
pub fn new(co_id: String, session_id: String, signer_id: Option<String>) -> SessionLog {
let co_id = CoID(co_id);
let session_id = SessionID(session_id);
let signer_id = signer_id.map(|id| SignerID(id));
let internal = SessionLogInternal::new(co_id, session_id, signer_id);
SessionLog { internal }
}
#[wasm_bindgen(js_name = clone)]
pub fn clone_js(&self) -> SessionLog {
self.clone()
}
#[wasm_bindgen(js_name = tryAdd)]
pub fn try_add(
&mut self,
transactions_json: Vec<String>,
new_signature_str: String,
skip_verify: bool,
) -> Result<(), CojsonCoreWasmError> {
let transactions: Vec<Box<RawValue>> = transactions_json
.into_iter()
.map(|s| {
serde_json::from_str(&s).map_err(|e| {
CojsonCoreWasmError::Js(JsValue::from(format!(
"Failed to parse transaction string: {}",
e
)))
})
})
.collect::<Result<Vec<_>, _>>()?;
let new_signature = Signature(new_signature_str);
self.internal
.try_add(transactions, &new_signature, skip_verify)?;
Ok(())
}
#[wasm_bindgen(js_name = addNewPrivateTransaction)]
pub fn add_new_private_transaction(
&mut self,
changes_json: &str,
signer_secret: String,
encryption_key: String,
key_id: String,
made_at: f64,
) -> Result<String, CojsonCoreWasmError> {
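        // The returned JSON has the shape
        // {"signature":"signature_z...","encrypted_changes":"encrypted_U..."}
        // (see PrivateTransactionResult above), so the JS caller gets both the new
        // session signature and the ciphertext from a single call.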
let (signature, transaction) = self.internal.add_new_transaction(
changes_json,
TransactionMode::Private{key_id: KeyID(key_id), key_secret: KeySecret(encryption_key)},
&SignerSecret(signer_secret),
made_at as u64,
);
// Extract encrypted_changes from the private transaction
let encrypted_changes = match transaction {
cojson_core::Transaction::Private(private_tx) => private_tx.encrypted_changes.value,
_ => return Err(CojsonCoreWasmError::Js(JsValue::from_str("Expected private transaction"))),
};
let result = PrivateTransactionResult{
signature: signature.0,
encrypted_changes,
};
Ok(serde_json::to_string(&result)?)
}
#[wasm_bindgen(js_name = addNewTrustingTransaction)]
pub fn add_new_trusting_transaction(
&mut self,
changes_json: &str,
signer_secret: String,
made_at: f64,
) -> Result<String, CojsonCoreWasmError> {
let (signature, _) = self.internal.add_new_transaction(
changes_json,
TransactionMode::Trusting,
&SignerSecret(signer_secret),
made_at as u64,
);
Ok(signature.0)
}
#[wasm_bindgen(js_name = decryptNextTransactionChangesJson)]
pub fn decrypt_next_transaction_changes_json(
&self,
tx_index: u32,
encryption_key: String,
) -> Result<String, CojsonCoreWasmError> {
Ok(self
.internal
.decrypt_next_transaction_changes_json(tx_index, KeySecret(encryption_key))?)
}
}

View File

@@ -1,18 +0,0 @@
[package]
name = "cojson-core"
version = "0.1.0"
edition = "2021"
[dependencies]
lzy = { path = "../lzy", optional = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["raw_value"] }
ed25519-dalek = { version = "2.2.0", features = ["rand_core"] }
bs58 = "0.5.1"
blake3 = "1.5.1"
salsa20 = "0.10.2"
base64 = "0.22.1"
thiserror = "1.0"
[dev-dependencies]
rand_core = { version = "0.6", features = ["getrandom"] }

View File

@@ -1,8 +0,0 @@
{
"coID": "co_zUsz4gkwCCWqMXa4LHXdwyAkVK3",
"signerID":"signer_z3FdM2ucYXUkbJQgPRf8R4Di6exd2sNPVaHaJHhQ8WAqi",
"knownKeys":[],
"exampleBase": {
"co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR":{"transactions":[{"changes":"[{\"key\":\"co_zkNajJ1BhLzR962jpzvXxx917ZB\",\"op\":\"set\",\"value\":\"admin\"}]","madeAt":1750685354142,"privacy":"trusting"},{"changes":"[{\"key\":\"key_z268nqpkZYFFWPoGzL_for_co_zkNajJ1BhLzR962jpzvXxx917ZB\",\"op\":\"set\",\"value\":\"sealed_UmZaEEzCUrP3Q-t2KrN00keV66wzA4LWadqhEmw0jlku5frSW2QyXUY3zYIC_XLig6BDS9rcZZdTm3CwnLjTPzp9hgd9TlJLf_Q==\"}]","madeAt":1750685354142,"privacy":"trusting"},{"changes":"[{\"key\":\"readKey\",\"op\":\"set\",\"value\":\"key_z268nqpkZYFFWPoGzL\"}]","madeAt":1750685354143,"privacy":"trusting"},{"changes":"[{\"key\":\"everyone\",\"op\":\"set\",\"value\":\"writer\"}]","madeAt":1750685354143,"privacy":"trusting"},{"changes":"[{\"key\":\"key_z268nqpkZYFFWPoGzL_for_everyone\",\"op\":\"set\",\"value\":\"keySecret_zHRFDaEsnpYSZh6rUAvXS8uUrKCxJAzeBPSSaVU1r9RZY\"}]","madeAt":1750685354143,"privacy":"trusting"}],"lastHash":"hash_z5j1DUZjBiTKm5XnLi8ZrNPV3P7zGuXnMNCZfh2qGXGC7","streamingHash":{"state":{"__wbg_ptr":1127736},"crypto":{}},"lastSignature":"signature_z4LoRVDLnJBfAzHvRn3avgK4RVBd7iAfqUMJdpDEtV8HGLKGAqLyweBkNp8jggcNUQZatrMeU9tdc31ct9qxw7rib","signatureAfter":{}}
}
}

View File

@@ -1,6 +0,0 @@
{
"coID": "co_zWnX74VrMP3n3dkm9wZVPszfiCw",
"signerID":"signer_z3FdM2ucYXUkbJQgPRf8R4Di6exd2sNPVaHaJHhQ8WAqi",
"knownKeys":[{"secret":"keySecret_zHRFDaEsnpYSZh6rUAvXS8uUrKCxJAzeBPSSaVU1r9RZY","id":"key_z268nqpkZYFFWPoGzL"}],
"exampleBase":{"co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR":{"transactions":[{"encryptedChanges":"encrypted_UxN_r7X7p-3GUE3GRGRO4NfIhEUvB01m-HaSSipRRrUsTmNBW9dZ-pkAk-NoVP_iEB0moLFbG9GDq9U9S-rUDfSPcaWCJtpE=","keyUsed":"key_z268nqpkZYFFWPoGzL","madeAt":1750685368555,"privacy":"private"}],"lastHash":"hash_zJCdoTRgDuFdUK2XogR7qgNnxezfYAVih3qve2UV65L5X","streamingHash":{"state":{"__wbg_ptr":1129680},"crypto":{}},"lastSignature":"signature_z3UErpugJAqDEYKgzUhs88xBMohzmaL228PgkNhEomf6AeVr7NYNxY17iUoCmPQTpGJNqYPo3y82mGX4oWBhkqN4y","signatureAfter":{}}}
}

View File

@@ -1,689 +0,0 @@
use base64::{engine::general_purpose::URL_SAFE, Engine as _};
use bs58;
use ed25519_dalek::{Signature as Ed25519Signature, Signer, SigningKey, Verifier, VerifyingKey};
use salsa20::{
cipher::{KeyIvInit, StreamCipher},
XSalsa20,
};
use serde::{Deserialize, Serialize};
use serde_json::{value::RawValue, Number, Value as JsonValue};
use thiserror::Error;
// Re-export lzy for convenience
#[cfg(feature = "lzy")]
pub use lzy;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct SessionID(pub String);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct SignerID(pub String);
impl From<VerifyingKey> for SignerID {
fn from(key: VerifyingKey) -> Self {
SignerID(format!(
"signer_z{}",
bs58::encode(key.to_bytes()).into_string()
))
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct SignerSecret(pub String);
impl From<SigningKey> for SignerSecret {
fn from(key: SigningKey) -> Self {
SignerSecret(format!(
"signerSecret_z{}",
bs58::encode(key.to_bytes()).into_string()
))
}
}
impl Into<SigningKey> for &SignerSecret {
fn into(self) -> SigningKey {
let key_bytes = decode_z(&self.0).expect("Invalid key secret");
SigningKey::from_bytes(&key_bytes.try_into().expect("Invalid key secret length"))
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Signature(pub String);
impl From<Ed25519Signature> for Signature {
fn from(signature: Ed25519Signature) -> Self {
Signature(format!(
"signature_z{}",
bs58::encode(signature.to_bytes()).into_string()
))
}
}
impl Into<Ed25519Signature> for &Signature {
fn into(self) -> Ed25519Signature {
let signature_bytes = decode_z(&self.0).expect("Invalid signature");
Ed25519Signature::from_bytes(
&signature_bytes
.try_into()
.expect("Invalid signature length"),
)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Hash(pub String);
impl From<blake3::Hash> for Hash {
fn from(hash: blake3::Hash) -> Self {
Hash(format!("hash_z{}", bs58::encode(hash.as_bytes()).into_string()))
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct KeyID(pub String);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct KeySecret(pub String);
impl Into<[u8; 32]> for &KeySecret {
fn into(self) -> [u8; 32] {
let key_bytes = decode_z(&self.0).expect("Invalid key secret");
key_bytes.try_into().expect("Invalid key secret length")
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct CoID(pub String);
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct TransactionID {
#[serde(rename = "sessionID")]
pub session_id: SessionID,
#[serde(rename = "txIndex")]
pub tx_index: u32,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Encrypted<T> {
pub value: String,
_phantom: std::marker::PhantomData<T>,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PrivateTransaction {
#[serde(rename = "encryptedChanges")]
pub encrypted_changes: Encrypted<JsonValue>,
#[serde(rename = "keyUsed")]
pub key_used: KeyID,
#[serde(rename = "madeAt")]
pub made_at: Number,
pub privacy: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct TrustingTransaction {
pub changes: String,
#[serde(rename = "madeAt")]
pub made_at: Number,
pub privacy: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Transaction {
Private(PrivateTransaction),
Trusting(TrustingTransaction),
}
pub enum TransactionMode {
Private {
key_id: KeyID,
key_secret: KeySecret,
},
Trusting,
}
#[derive(Error, Debug)]
pub enum CoJsonCoreError {
#[error("Transaction not found at index {0}")]
TransactionNotFound(u32),
#[error("Invalid encrypted prefix in transaction")]
InvalidEncryptedPrefix,
#[error("Base64 decoding failed")]
Base64Decode(#[from] base64::DecodeError),
#[error("UTF-8 conversion failed")]
Utf8(#[from] std::string::FromUtf8Error),
#[error("JSON deserialization failed")]
Json(#[from] serde_json::Error),
#[error("Signature verification failed: (hash: {0})")]
SignatureVerification(String),
}
#[derive(Clone)]
pub struct SessionLogInternal {
co_id: CoID,
session_id: SessionID,
public_key: Option<VerifyingKey>,
hasher: blake3::Hasher,
transactions_json: Vec<String>,
last_signature: Option<Signature>,
}
impl SessionLogInternal {
pub fn new(co_id: CoID, session_id: SessionID, signer_id: Option<SignerID>) -> Self {
let hasher = blake3::Hasher::new();
let public_key = match signer_id {
Some(signer_id) => Some(VerifyingKey::try_from(
decode_z(&signer_id.0)
.expect("Invalid public key")
.as_slice(),
)
.expect("Invalid public key")),
None => None,
};
Self {
co_id,
session_id,
public_key,
hasher,
transactions_json: Vec::new(),
last_signature: None,
}
}
pub fn transactions_json(&self) -> &Vec<String> {
&self.transactions_json
}
pub fn last_signature(&self) -> Option<&Signature> {
self.last_signature.as_ref()
}
fn expected_hash_after(&self, transactions: &[Box<RawValue>]) -> blake3::Hasher {
let mut hasher = self.hasher.clone();
for tx in transactions {
hasher.update(tx.get().as_bytes());
}
hasher
}
pub fn try_add(
&mut self,
transactions: Vec<Box<RawValue>>,
new_signature: &Signature,
skip_verify: bool,
) -> Result<(), CoJsonCoreError> {
if !skip_verify {
let hasher = self.expected_hash_after(&transactions);
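            // Note: the signature covers the hash in its JSON-string form, i.e.
            // including the surrounding quotes, which is why they are kept below.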
let new_hash_encoded_stringified = format!(
"\"hash_z{}\"",
bs58::encode(hasher.finalize().as_bytes()).into_string()
);
if let Some(public_key) = self.public_key {
match public_key.verify(
new_hash_encoded_stringified.as_bytes(),
&(new_signature).into(),
) {
Ok(()) => {}
Err(_) => {
return Err(CoJsonCoreError::SignatureVerification(
new_hash_encoded_stringified.replace("\"", ""),
));
}
}
} else {
return Err(CoJsonCoreError::SignatureVerification(
new_hash_encoded_stringified.replace("\"", ""),
));
}
self.hasher = hasher;
}
for tx in transactions {
self.transactions_json.push(tx.get().to_string());
}
self.last_signature = Some(new_signature.clone());
Ok(())
}
pub fn add_new_transaction(
&mut self,
changes_json: &str,
mode: TransactionMode,
signer_secret: &SignerSecret,
made_at: u64,
) -> (Signature, Transaction) {
let new_tx = match mode {
TransactionMode::Private { key_id, key_secret } => {
let tx_index = self.transactions_json.len() as u32;
let nonce_material = JsonValue::Object(serde_json::Map::from_iter(vec![
("in".to_string(), JsonValue::String(self.co_id.0.clone())),
(
"tx".to_string(),
serde_json::to_value(TransactionID {
session_id: self.session_id.clone(),
tx_index,
})
.unwrap(),
),
]));
let nonce = self.generate_json_nonce(&nonce_material);
let secret_key_bytes: [u8; 32] = (&key_secret).into();
let mut ciphertext = changes_json.as_bytes().to_vec();
let mut cipher = XSalsa20::new(&secret_key_bytes.into(), &nonce.into());
cipher.apply_keystream(&mut ciphertext);
let encrypted_str = format!("encrypted_U{}", URL_SAFE.encode(&ciphertext));
Transaction::Private(PrivateTransaction {
encrypted_changes: Encrypted {
value: encrypted_str,
_phantom: std::marker::PhantomData,
},
key_used: key_id.clone(),
made_at: Number::from(made_at),
privacy: "private".to_string(),
})
}
TransactionMode::Trusting => Transaction::Trusting(TrustingTransaction {
changes: changes_json.to_string(),
made_at: Number::from(made_at),
privacy: "trusting".to_string(),
}),
};
let tx_json = serde_json::to_string(&new_tx).unwrap();
self.hasher.update(tx_json.as_bytes());
self.transactions_json.push(tx_json);
let new_hash = self.hasher.finalize();
let new_hash_encoded_stringified = format!("\"hash_z{}\"", bs58::encode(new_hash.as_bytes()).into_string());
let signing_key: SigningKey = signer_secret.into();
let new_signature: Signature = signing_key.sign(new_hash_encoded_stringified.as_bytes()).into();
self.last_signature = Some(new_signature.clone());
(new_signature, new_tx)
}
pub fn decrypt_next_transaction_changes_json(
&self,
tx_index: u32,
key_secret: KeySecret,
) -> Result<String, CoJsonCoreError> {
let tx_json = self
.transactions_json
.get(tx_index as usize)
.ok_or(CoJsonCoreError::TransactionNotFound(tx_index))?;
let tx: Transaction = serde_json::from_str(tx_json)?;
match tx {
Transaction::Private(private_tx) => {
let nonce_material = JsonValue::Object(serde_json::Map::from_iter(vec![
("in".to_string(), JsonValue::String(self.co_id.0.clone())),
(
"tx".to_string(),
serde_json::to_value(TransactionID {
session_id: self.session_id.clone(),
tx_index,
})?,
),
]));
let nonce = self.generate_json_nonce(&nonce_material);
let encrypted_val = private_tx.encrypted_changes.value;
let prefix = "encrypted_U";
if !encrypted_val.starts_with(prefix) {
return Err(CoJsonCoreError::InvalidEncryptedPrefix);
}
let ciphertext_b64 = &encrypted_val[prefix.len()..];
let mut ciphertext = URL_SAFE.decode(ciphertext_b64)?;
let secret_key_bytes: [u8; 32] = (&key_secret).into();
let mut cipher = XSalsa20::new((&secret_key_bytes).into(), &nonce.into());
cipher.apply_keystream(&mut ciphertext);
Ok(String::from_utf8(ciphertext)?)
}
Transaction::Trusting(trusting_tx) => Ok(trusting_tx.changes),
}
}
fn generate_nonce(&self, material: &[u8]) -> [u8; 24] {
let mut hasher = blake3::Hasher::new();
hasher.update(material);
let mut output = [0u8; 24];
let mut output_reader = hasher.finalize_xof();
output_reader.fill(&mut output);
output
}
fn generate_json_nonce(&self, material: &JsonValue) -> [u8; 24] {
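        // Illustrative shape of the material (built by the callers above): for the
        // first transaction of a session it serializes to something like
        //   {"in":"co_zExample","tx":{"sessionID":"co_zExample_session_zAbc","txIndex":0}}
        // and the 24-byte nonce is the BLAKE3 XOF output over that exact string.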
let stable_json = serde_json::to_string(&material).unwrap();
self.generate_nonce(stable_json.as_bytes())
}
}
pub fn decode_z(value: &str) -> Result<Vec<u8>, String> {
let prefix_end = value.find("_z").ok_or("Invalid prefix")? + 2;
bs58::decode(&value[prefix_end..])
.into_vec()
.map_err(|e| e.to_string())
}
#[cfg(test)]
mod tests {
use super::*;
use rand_core::OsRng;
use std::{collections::HashMap, fs};
#[test]
fn it_works() {
let mut csprng = OsRng;
let signing_key = SigningKey::generate(&mut csprng);
let verifying_key = signing_key.verifying_key();
let session = SessionLogInternal::new(
CoID("co_test1".to_string()),
SessionID("session_test1".to_string()),
            Some(verifying_key.into()),
);
assert!(session.last_signature.is_none());
}
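    // Added sketch: decode_z strips everything up to and including "_z" and
    // base58-decodes the remainder, so round-tripping an encoded key recovers
    // the original bytes.
    #[test]
    fn test_decode_z_round_trip() {
        let bytes = [42u8; 32];
        let encoded = format!("signer_z{}", bs58::encode(&bytes).into_string());
        assert_eq!(decode_z(&encoded).unwrap(), bytes.to_vec());
    }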
#[test]
fn test_add_from_example_json() {
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct TestSession<'a> {
last_signature: Signature,
#[serde(borrow)]
transactions: Vec<&'a RawValue>,
last_hash: String,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Root<'a> {
#[serde(borrow)]
example_base: HashMap<String, TestSession<'a>>,
#[serde(rename = "signerID")]
signer_id: SignerID,
}
let data = fs::read_to_string("data/singleTxSession.json")
.expect("Unable to read singleTxSession.json");
let root: Root = serde_json::from_str(&data).unwrap();
let (session_id_str, example) = root.example_base.into_iter().next().unwrap();
let session_id = SessionID(session_id_str.clone());
let co_id = CoID(
session_id_str
.split("_session_")
.next()
.unwrap()
.to_string(),
);
        let mut session = SessionLogInternal::new(co_id, session_id, Some(root.signer_id));
let new_signature = example.last_signature;
let result = session.try_add(
vec![example.transactions[0].to_owned()],
&new_signature,
false,
);
match result {
            Ok(()) => {
let final_hash = session.hasher.finalize();
let final_hash_encoded = format!(
"hash_z{}",
bs58::encode(final_hash.as_bytes()).into_string()
);
assert_eq!(final_hash_encoded, example.last_hash);
assert_eq!(session.last_signature, Some(new_signature));
}
Err(CoJsonCoreError::SignatureVerification(new_hash_encoded)) => {
assert_eq!(new_hash_encoded, example.last_hash);
panic!("Signature verification failed despite same hash");
}
Err(e) => {
panic!("Unexpected error: {:?}", e);
}
}
}
#[test]
fn test_add_from_example_json_multi_tx() {
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct TestSession<'a> {
last_signature: Signature,
#[serde(borrow)]
transactions: Vec<&'a RawValue>,
last_hash: String,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Root<'a> {
#[serde(borrow)]
example_base: HashMap<String, TestSession<'a>>,
#[serde(rename = "signerID")]
signer_id: SignerID,
}
let data = fs::read_to_string("data/multiTxSession.json")
.expect("Unable to read multiTxSession.json");
let root: Root = serde_json::from_str(&data).unwrap();
let (session_id_str, example) = root.example_base.into_iter().next().unwrap();
let session_id = SessionID(session_id_str.clone());
let co_id = CoID(
session_id_str
.split("_session_")
.next()
.unwrap()
.to_string(),
);
        let mut session = SessionLogInternal::new(co_id, session_id, Some(root.signer_id));
let new_signature = example.last_signature;
let result = session.try_add(
example.transactions.into_iter().map(|tx| tx.to_owned()).collect(),
&new_signature,
false,
);
match result {
            Ok(()) => {
let final_hash = session.hasher.finalize();
let final_hash_encoded = format!(
"hash_z{}",
bs58::encode(final_hash.as_bytes()).into_string()
);
assert_eq!(final_hash_encoded, example.last_hash);
assert_eq!(session.last_signature, Some(new_signature));
}
Err(CoJsonCoreError::SignatureVerification(new_hash_encoded)) => {
assert_eq!(new_hash_encoded, example.last_hash);
panic!("Signature verification failed despite same hash");
}
Err(e) => {
panic!("Unexpected error: {:?}", e);
}
}
}
#[test]
fn test_add_new_transaction() {
// Load the example data to get all the pieces we need
let data = fs::read_to_string("data/singleTxSession.json")
.expect("Unable to read singleTxSession.json");
let root: serde_json::Value = serde_json::from_str(&data).unwrap();
let session_data =
&root["exampleBase"]["co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR"];
let tx_from_example = &session_data["transactions"][0];
let known_key = &root["knownKeys"][0];
// Since we don't have the original private key, we generate a new one for this test.
let mut csprng = OsRng;
let signing_key = SigningKey::generate(&mut csprng);
let public_key = signing_key.verifying_key();
// Initialize an empty session
let mut session = SessionLogInternal::new(
CoID(root["coID"].as_str().unwrap().to_string()),
SessionID("co_zkNajJ1BhLzR962jpzvXxx917ZB_session_zXzrQLTtp8rR".to_string()),
            Some(public_key.into()),
);
// The plaintext changes we want to add
let changes_json =
r#"[{"after":"start","op":"app","value":"co_zMphsnYN6GU8nn2HDY5suvyGufY"}]"#;
// Extract all the necessary components from the example data
let key_secret = KeySecret(known_key["secret"].as_str().unwrap().to_string());
let key_id = KeyID(known_key["id"].as_str().unwrap().to_string());
let made_at = tx_from_example["madeAt"].as_u64().unwrap();
// Call the function we are testing
let (new_signature, _new_tx) = session.add_new_transaction(
changes_json,
TransactionMode::Private {
key_id: key_id,
key_secret: key_secret,
},
&signing_key.into(),
made_at,
);
// 1. Check that the transaction we created matches the one in the file
let created_tx_json = &session.transactions_json[0];
let expected_tx_json = serde_json::to_string(tx_from_example).unwrap();
assert_eq!(created_tx_json, &expected_tx_json);
// 2. Check that the final hash of the session matches the one in the file
let final_hash = session.hasher.finalize();
let final_hash_encoded = format!(
"hash_z{}",
bs58::encode(final_hash.as_bytes()).into_string()
);
assert_eq!(
final_hash_encoded,
session_data["lastHash"].as_str().unwrap()
);
let final_hash_encoded_stringified = format!(
"\"{}\"",
final_hash_encoded
);
// 3. Check that the signature is valid for our generated key
        assert!(session
            .public_key
            .unwrap()
            .verify(final_hash_encoded_stringified.as_bytes(), &(&new_signature).into())
            .is_ok());
assert_eq!(session.last_signature, Some(new_signature));
}
#[test]
fn test_decrypt_from_example_json() {
#[derive(Deserialize, Debug)]
struct KnownKey {
secret: String,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(bound(deserialize = "'de: 'a"))]
struct TestSession<'a> {
last_signature: String,
#[serde(borrow)]
transactions: Vec<&'a RawValue>,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(bound(deserialize = "'de: 'a"))]
struct Root<'a> {
#[serde(borrow)]
example_base: HashMap<String, TestSession<'a>>,
#[serde(rename = "signerID")]
signer_id: SignerID,
known_keys: Vec<KnownKey>,
#[serde(rename = "coID")]
co_id: CoID,
}
let data = fs::read_to_string("data/singleTxSession.json")
.expect("Unable to read singleTxSession.json");
let root: Root = serde_json::from_str(&data).unwrap();
let (session_id_str, example) = root.example_base.into_iter().next().unwrap();
let session_id = SessionID(session_id_str.clone());
let public_key =
VerifyingKey::from_bytes(&decode_z(&root.signer_id.0).unwrap().try_into().unwrap())
.unwrap();
        let mut session = SessionLogInternal::new(root.co_id, session_id, Some(public_key.into()));
let new_signature = Signature(example.last_signature);
session
.try_add(
example
.transactions
.into_iter()
.map(|v| v.to_owned())
.collect(),
&new_signature,
true, // Skipping verification because we don't have the right initial state
)
.unwrap();
let key_secret = KeySecret(root.known_keys[0].secret.clone());
let decrypted = session
.decrypt_next_transaction_changes_json(0, key_secret)
.unwrap();
assert_eq!(
decrypted,
r#"[{"after":"start","op":"app","value":"co_zMphsnYN6GU8nn2HDY5suvyGufY"}]"#
);
}
}

View File

@@ -1,15 +0,0 @@
[package]
name = "lzy"
version = "0.1.0"
edition = "2021"
[dependencies]
[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
[[bench]]
name = "compression_benchmark"
harness = false

View File

@@ -1,36 +0,0 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};
use lzy::{compress, decompress};
use std::fs;
use std::time::Duration;
fn compression_benchmark(c: &mut Criterion) {
let data = fs::read("data/compression_66k_JSON.txt").expect("Failed to read benchmark data");
let mut group = c.benchmark_group("LZY Compression");
group.measurement_time(Duration::from_secs(10));
group.sample_size(10);
group.throughput(Throughput::Bytes(data.len() as u64));
let compressed = compress(&data);
let compression_ratio = compressed.len() as f64 / data.len() as f64;
println!(
"Compression ratio (compressed/original): {:.4} ({} / {} bytes)",
compression_ratio,
compressed.len(),
data.len()
);
group.bench_function("compress", |b| {
b.iter(|| compress(black_box(&data)))
});
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed);
group.bench_function("decompress", |b| {
b.iter(|| decompress(black_box(&compressed)))
});
}
criterion_group!(benches, compression_benchmark);
criterion_main!(benches);

File diff suppressed because one or more lines are too long

View File

@@ -1,348 +0,0 @@
const MIN_MATCH_LEN: usize = 4;
const MAX_MATCH_LEN: usize = 15 + 3;
const MAX_LITERALS: usize = 15;
const HASH_LOG: u32 = 16;
const HASH_TABLE_SIZE: usize = 1 << HASH_LOG;
fn hash(data: &[u8]) -> usize {
const KNUTH_MULT_PRIME: u32 = 2654435761;
let val = u32::from_le_bytes(data.try_into().unwrap());
((val.wrapping_mul(KNUTH_MULT_PRIME)) >> (32 - HASH_LOG)) as usize
}
#[derive(Debug, PartialEq)]
pub enum DecompressionError {
InvalidToken,
UnexpectedEof,
}
pub fn decompress(input: &[u8]) -> Result<Vec<u8>, DecompressionError> {
let mut decompressed = Vec::with_capacity(input.len() * 2);
let mut i = 0;
while i < input.len() {
let token = input[i];
i += 1;
let literal_len = (token >> 4) as usize;
let match_len_token = (token & 0x0F) as usize;
if i + literal_len > input.len() {
return Err(DecompressionError::UnexpectedEof);
}
decompressed.extend_from_slice(&input[i..i + literal_len]);
i += literal_len;
if match_len_token > 0 {
if i + 2 > input.len() {
return Err(DecompressionError::UnexpectedEof);
}
let offset = u16::from_le_bytes([input[i], input[i + 1]]) as usize;
i += 2;
if offset == 0 || offset > decompressed.len() {
return Err(DecompressionError::InvalidToken);
}
let match_len = match_len_token + 3;
let match_start = decompressed.len() - offset;
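// Copy byte-by-byte so that overlapping matches (offset < match_len) re-read bytes written earlier in this loop.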
for k in 0..match_len {
decompressed.push(decompressed[match_start + k]);
}
}
}
Ok(decompressed)
}
pub fn compress(input: &[u8]) -> Vec<u8> {
let mut compressor = Compressor::new();
compressor.compress_chunk(input)
}
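// Sequence layout: [token][literals][offset?]. The token's high nibble is the literal count (0..=15),
// the low nibble is match_len - 3 (0 means literal-only), and the offset is a 2-byte little-endian
// back-reference emitted only when a match is present.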
fn emit_sequence(out: &mut Vec<u8>, mut literals: &[u8], match_len: usize, offset: u16) {
while literals.len() > MAX_LITERALS {
let token = (MAX_LITERALS as u8) << 4;
out.push(token);
out.extend_from_slice(&literals[..MAX_LITERALS]);
literals = &literals[MAX_LITERALS..];
}
let lit_len_token = literals.len() as u8;
let match_len_token = if match_len > 0 {
(match_len - 3) as u8
} else {
0
};
let token = lit_len_token << 4 | match_len_token;
out.push(token);
out.extend_from_slice(literals);
if match_len > 0 {
out.extend_from_slice(&offset.to_le_bytes());
}
}
pub struct Compressor {
hash_table: Vec<u32>,
history: Vec<u8>,
}
impl Compressor {
pub fn new() -> Self {
Self {
hash_table: vec![0; HASH_TABLE_SIZE],
history: Vec::new(),
}
}
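// Every chunk appends to the shared `history`, so a later chunk may back-reference bytes from
// earlier chunks. The concatenated output must therefore be decompressed as one stream
// (see test_crdt_chunked_compression below).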
pub fn compress_chunk(&mut self, chunk: &[u8]) -> Vec<u8> {
let mut compressed_chunk = Vec::new();
let chunk_start_cursor = self.history.len();
self.history.extend_from_slice(chunk);
let mut cursor = chunk_start_cursor;
let mut literal_anchor = chunk_start_cursor;
while cursor < self.history.len() {
let mut best_match: Option<(u16, usize)> = None;
if self.history.len() - cursor >= MIN_MATCH_LEN {
let h = hash(&self.history[cursor..cursor + 4]);
let match_pos = self.hash_table[h] as usize;
if match_pos < cursor && cursor - match_pos < u16::MAX as usize {
if self.history.get(match_pos..match_pos + MIN_MATCH_LEN) == Some(&self.history[cursor..cursor + MIN_MATCH_LEN]) {
let mut match_len = MIN_MATCH_LEN;
while cursor + match_len < self.history.len()
&& match_len < MAX_MATCH_LEN
&& self.history.get(match_pos + match_len) == self.history.get(cursor + match_len)
{
match_len += 1;
}
best_match = Some(((cursor - match_pos) as u16, match_len));
}
}
self.hash_table[h] = cursor as u32;
}
if let Some((offset, match_len)) = best_match {
let literals = &self.history[literal_anchor..cursor];
emit_sequence(&mut compressed_chunk, literals, match_len, offset);
cursor += match_len;
literal_anchor = cursor;
} else {
cursor += 1;
}
}
if literal_anchor < cursor {
let literals = &self.history[literal_anchor..cursor];
emit_sequence(&mut compressed_chunk, literals, 0, 0);
}
compressed_chunk
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_simple_roundtrip() {
let data = b"hello world, hello people";
let compressed = compress(data);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data).unwrap(), compressed);
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
#[test]
fn test_long_literals() {
let data = b"abcdefghijklmnopqrstuvwxyz";
let compressed = compress(data);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data).unwrap(), compressed);
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
#[test]
fn test_decompress_empty() {
let data = b"";
let compressed = compress(data);
assert!(compressed.is_empty());
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
#[test]
fn test_overlapping_match() {
let data = b"abcdeabcdeabcdeabcde"; // repeating sequence
let compressed = compress(data);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data).unwrap(), compressed);
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
let data2 = b"abababababababababab";
let compressed2 = compress(data2);
println!("Compressed '{}': {:x?}", std::str::from_utf8(data2).unwrap(), compressed2);
let decompressed2 = decompress(&compressed2).unwrap();
assert_eq!(data2, decompressed2.as_slice());
}
#[test]
fn test_json_roundtrip() {
let data = std::fs::read("data/compression_66k_JSON.txt").unwrap();
let compressed = compress(&data);
std::fs::write("compressed_66k.lzy", &compressed).unwrap();
let decompressed = decompress(&compressed).unwrap();
assert_eq!(data, decompressed.as_slice());
}
mod crdt_helpers {
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct After {
pub session_id: String,
pub tx_index: u32,
pub change_idx: u32,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Transaction {
pub op: String,
pub value: String,
pub after: After,
}
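// Serializes to e.g. {"op":"app","value":"T","after":{"sessionId":"...","txIndex":0,"changeIdx":0}}
// (camelCase comes from the serde attribute on After). For the session ID used in the tests below
// this is about 126 bytes of JSON per one-character edit, a little more as txIndex grows.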
pub fn generate_transactions(text: &str, session_id: &str) -> Vec<String> {
let mut transactions = Vec::new();
for (i, c) in text.chars().enumerate() {
let tx = Transaction {
op: "app".to_string(),
value: c.to_string(),
after: After {
session_id: session_id.to_string(),
tx_index: i as u32,
change_idx: 0,
},
};
transactions.push(serde_json::to_string(&tx).unwrap());
}
transactions
}
pub fn generate_shorthand_transactions(text: &str) -> Vec<String> {
let mut transactions = Vec::new();
for c in text.chars() {
transactions.push(serde_json::to_string(&c.to_string()).unwrap());
}
transactions
}
}
#[test]
fn test_crdt_transaction_generation() {
let sample_text = "This is a sample text for our CRDT simulation. \
It should be long enough to see some interesting compression results later on. \
Let's add another sentence to make it a bit more substantial.";
let session_id = "co_zRtnoNffeMHge9wvyL5mK1RWbdz_session_zKvAVFSV5cqW";
let transactions = crdt_helpers::generate_transactions(sample_text, session_id);
println!("--- Generated CRDT Transactions ---");
for tx in &transactions {
println!("{}", tx);
}
println!("--- End of CRDT Transactions ---");
assert!(!transactions.is_empty());
assert_eq!(transactions.len(), sample_text.chars().count());
}
#[test]
fn test_crdt_chunked_compression() {
let sample_text = "This is a sample text for our CRDT simulation. \
It should be long enough to see some interesting compression results later on. \
Let's add another sentence to make it a bit more substantial.";
let session_id = "co_zRtnoNffeMHge9wvyL5mK1RWbdz_session_zKvAVFSV5cqW";
let transactions_json = crdt_helpers::generate_transactions(sample_text, session_id);
let mut compressor = Compressor::new();
let mut compressed_log = Vec::new();
let mut total_json_len = 0;
for tx_json in &transactions_json {
let compressed_chunk = compressor.compress_chunk(tx_json.as_bytes());
compressed_log.extend_from_slice(&compressed_chunk);
total_json_len += tx_json.len();
}
let decompressed = decompress(&compressed_log).unwrap();
// Verify roundtrip
let original_log_concatenated = transactions_json.join("");
assert_eq!(decompressed, original_log_concatenated.as_bytes());
let plaintext_len = sample_text.len();
let compressed_len = compressed_log.len();
let compression_ratio = compressed_len as f64 / total_json_len as f64;
let overhead_ratio = compressed_len as f64 / plaintext_len as f64;
println!("\n--- CRDT Chunked Compression Test ---");
println!("Plaintext size: {} bytes", plaintext_len);
println!("Total JSON size: {} bytes", total_json_len);
println!("Compressed log size: {} bytes", compressed_len);
println!("Compression ratio (compressed/json): {:.4}", compression_ratio);
println!("Overhead ratio (compressed/plaintext): {:.4}", overhead_ratio);
println!("--- End of Test ---");
}
#[test]
fn test_crdt_shorthand_compression() {
let sample_text = "This is a sample text for our CRDT simulation. \
It should be long enough to see some interesting compression results later on. \
Let's add another sentence to make it a bit more substantial.";
let transactions_json = crdt_helpers::generate_shorthand_transactions(sample_text);
let mut compressor = Compressor::new();
let mut compressed_log = Vec::new();
let mut total_json_len = 0;
for tx_json in &transactions_json {
let compressed_chunk = compressor.compress_chunk(tx_json.as_bytes());
compressed_log.extend_from_slice(&compressed_chunk);
total_json_len += tx_json.len();
}
let decompressed = decompress(&compressed_log).unwrap();
// Verify roundtrip
let original_log_concatenated = transactions_json.join("");
assert_eq!(decompressed, original_log_concatenated.as_bytes());
let plaintext_len = sample_text.len();
let compressed_len = compressed_log.len();
let compression_ratio = compressed_len as f64 / total_json_len as f64;
let overhead_ratio = compressed_len as f64 / plaintext_len as f64;
println!("\n--- CRDT Shorthand Compression Test ---");
println!("Plaintext size: {} bytes", plaintext_len);
println!("Total JSON size: {} bytes", total_json_len);
println!("Compressed log size: {} bytes", compressed_len);
println!("Compression ratio (compressed/json): {:.4}", compression_ratio);
println!("Overhead ratio (compressed/plaintext): {:.4}", overhead_ratio);
println!("--- End of Test ---");
}
}
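As a quick illustration of the sequence format implemented by `compress` and `emit_sequence` above, here is a hand-derived round-trip for the same input as `test_simple_roundtrip`. The expected byte values follow from the code (one 13-literal run plus a 6-byte match of "hello " at offset 13, then a trailing 6-literal run); they are shown for orientation only and are not a test vector shipped with the crate.

```rust
use lzy::{compress, decompress};

fn main() {
    let data = b"hello world, hello people";
    let compressed = compress(data);

    // First sequence: token 0xD3 = (13 literals << 4) | (match_len 6 - 3),
    // then the 13 literal bytes, then the match offset 13 as a little-endian u16.
    let mut expected = vec![0xD3u8];
    expected.extend_from_slice(b"hello world, ");
    expected.extend_from_slice(&13u16.to_le_bytes());

    // Trailing sequence: token 0x60 = (6 literals << 4) | 0 (no match), then the literals.
    expected.push(0x60);
    expected.extend_from_slice(b"people");

    assert_eq!(compressed, expected); // 25 input bytes -> 23 output bytes
    assert_eq!(data, decompress(&compressed).unwrap().as_slice());
}
```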

View File

@@ -36,10 +36,8 @@ yarn-error.log*
.pnpm-debug.log*
# env files (can opt-in for committing if needed)
.env
.env.*
!.env.example
!.env.test
.env*
!.env
# vercel
.vercel

View File

@@ -1,258 +1,5 @@
# betterauth
## 0.1.25
### Patch Changes
- Updated dependencies [048ac1d]
- jazz-tools@0.14.22
- jazz-betterauth-server-plugin@0.14.22
- jazz-inspector@0.14.22
- jazz-react@0.14.22
- jazz-react-auth-betterauth@0.14.22
- jazz-betterauth-client-plugin@0.14.22
## 0.1.24
### Patch Changes
- Updated dependencies [e7e505e]
- Updated dependencies [13b57aa]
- Updated dependencies [5662faa]
- Updated dependencies [2116a59]
- jazz-tools@0.14.21
- jazz-betterauth-server-plugin@0.14.21
- jazz-inspector@0.14.21
- jazz-react@0.14.21
- jazz-react-auth-betterauth@0.14.21
- jazz-betterauth-client-plugin@0.14.21
## 0.1.23
### Patch Changes
- Updated dependencies [6f72419]
- Updated dependencies [04b20c2]
- jazz-tools@0.14.20
- jazz-betterauth-server-plugin@0.14.20
- jazz-inspector@0.14.20
- jazz-react@0.14.20
- jazz-react-auth-betterauth@0.14.20
- jazz-betterauth-client-plugin@0.14.20
## 0.1.22
### Patch Changes
- jazz-betterauth-client-plugin@0.14.19
- jazz-betterauth-server-plugin@0.14.19
- jazz-react-auth-betterauth@0.14.19
- jazz-inspector@0.14.19
- jazz-react@0.14.19
- jazz-tools@0.14.19
## 0.1.21
### Patch Changes
- Updated dependencies [4b950bc]
- Updated dependencies [d6d9c0a]
- Updated dependencies [c559054]
- jazz-tools@0.14.18
- jazz-betterauth-server-plugin@0.14.18
- jazz-inspector@0.14.18
- jazz-react@0.14.18
- jazz-react-auth-betterauth@0.14.18
- jazz-betterauth-client-plugin@0.14.18
## 0.1.20
### Patch Changes
- Updated dependencies [e512df4]
- jazz-betterauth-server-plugin@0.14.17
- jazz-tools@0.14.17
- jazz-betterauth-client-plugin@0.14.17
- jazz-inspector@0.14.17
- jazz-react@0.14.17
- jazz-react-auth-betterauth@0.14.17
## 0.1.19
### Patch Changes
- jazz-betterauth-server-plugin@0.14.16
- jazz-inspector@0.14.16
- jazz-react@0.14.16
- jazz-react-auth-betterauth@0.14.16
- jazz-tools@0.14.16
- jazz-betterauth-client-plugin@0.14.16
## 0.1.18
### Patch Changes
- Updated dependencies [f9590f9]
- jazz-react@0.14.15
- jazz-betterauth-server-plugin@0.14.15
- jazz-inspector@0.14.15
- jazz-react-auth-betterauth@0.14.15
- jazz-tools@0.14.15
- jazz-betterauth-client-plugin@0.14.15
## 0.1.17
### Patch Changes
- Updated dependencies [e32a1f7]
- jazz-tools@0.14.14
- jazz-betterauth-server-plugin@0.14.14
- jazz-inspector@0.14.14
- jazz-react@0.14.14
- jazz-react-auth-betterauth@0.14.14
- jazz-betterauth-client-plugin@0.14.14
## 0.1.16
### Patch Changes
- jazz-inspector@0.14.13
- jazz-react@0.14.13
- jazz-react-auth-betterauth@0.14.13
## 0.1.15
### Patch Changes
- jazz-inspector@0.14.12
- jazz-react@0.14.12
- jazz-react-auth-betterauth@0.14.12
## 0.1.14
### Patch Changes
- Updated dependencies [dc746a2]
- Updated dependencies [f869d9a]
- Updated dependencies [3fe6832]
- jazz-react-auth-betterauth@0.14.10
- jazz-inspector@0.14.10
- jazz-react@0.14.10
- jazz-tools@0.14.10
- jazz-betterauth-server-plugin@0.14.10
- jazz-betterauth-client-plugin@0.14.10
## 0.1.13
### Patch Changes
- Updated dependencies [22c2600]
- jazz-tools@0.14.9
- jazz-betterauth-server-plugin@0.14.9
- jazz-inspector@0.14.9
- jazz-react@0.14.9
- jazz-react-auth-betterauth@0.14.9
- jazz-betterauth-client-plugin@0.14.9
## 0.1.12
### Patch Changes
- Updated dependencies [637ae13]
- jazz-tools@0.14.8
- jazz-betterauth-server-plugin@0.14.8
- jazz-inspector@0.14.8
- jazz-react@0.14.8
- jazz-react-auth-betterauth@0.14.8
- jazz-betterauth-client-plugin@0.14.8
## 0.1.11
### Patch Changes
- Updated dependencies [365b0ea]
- jazz-tools@0.14.7
- jazz-betterauth-server-plugin@0.14.7
- jazz-inspector@0.14.7
- jazz-react@0.14.7
- jazz-react-auth-betterauth@0.14.7
- jazz-betterauth-client-plugin@0.14.7
## 0.1.10
### Patch Changes
- Updated dependencies [9d6d9fe]
- Updated dependencies [9d6d9fe]
- jazz-tools@0.14.6
- jazz-betterauth-server-plugin@0.14.6
- jazz-inspector@0.14.6
- jazz-react@0.14.6
- jazz-react-auth-betterauth@0.14.6
- jazz-betterauth-client-plugin@0.14.6
## 0.1.9
### Patch Changes
- Updated dependencies [91cbb2f]
- Updated dependencies [20b3d88]
- jazz-tools@0.14.5
- jazz-betterauth-server-plugin@0.14.5
- jazz-inspector@0.14.5
- jazz-react@0.14.5
- jazz-react-auth-betterauth@0.14.5
- jazz-betterauth-client-plugin@0.14.5
## 0.1.8
### Patch Changes
- Updated dependencies [011af55]
- jazz-tools@0.14.4
- jazz-betterauth-server-plugin@0.14.4
- jazz-inspector@0.14.4
- jazz-react@0.14.4
- jazz-react-auth-betterauth@0.14.4
- jazz-betterauth-client-plugin@0.14.4
## 0.1.7
### Patch Changes
- Updated dependencies [3d1027f]
- Updated dependencies [c240eed]
- jazz-tools@0.14.2
- jazz-betterauth-server-plugin@0.14.2
- jazz-inspector@0.14.2
- jazz-react@0.14.2
- jazz-react-auth-betterauth@0.14.2
- jazz-betterauth-client-plugin@0.14.2
## 0.1.6
### Patch Changes
- Updated dependencies [cdfc105]
- jazz-tools@0.14.1
- jazz-betterauth-server-plugin@0.14.1
- jazz-inspector@0.14.1
- jazz-react@0.14.1
- jazz-react-auth-betterauth@0.14.1
- jazz-betterauth-client-plugin@0.14.1
## 0.1.5
### Patch Changes
- Updated dependencies [5835ed1]
- jazz-tools@0.14.0
- jazz-betterauth-server-plugin@0.14.0
- jazz-inspector@0.14.0
- jazz-react@0.14.0
- jazz-react-auth-betterauth@0.14.0
- jazz-betterauth-client-plugin@0.14.0
## 0.1.4
### Patch Changes

View File

@@ -5,12 +5,10 @@ This example demonstrates how to integrate [Better Auth](https://www.better-auth
## Getting started
To run this example, you may either:
- Clone the Jazz monorepo and run this example from within.
- Create a new Jazz project using this example as a template, and run that new project.
* Clone the Jazz monorepo and run this example from within.
* Create a new Jazz project using this example as a template, and run that new project.
### Setting environment variables
- `NEXT_PUBLIC_AUTH_BASE_URL`: A URL to a Better Auth server. If undefined, the example will self-host a Better Auth server.
- `BETTER_AUTH_SECRET`: The encryption secret used by the self-hosted Better Auth server (required only if `NEXT_PUBLIC_AUTH_BASE_URL` is undefined)
- `GITHUB_CLIENT_ID`: The client ID for the GitHub OAuth provider used by the self-hosted Better Auth server (required only if `NEXT_PUBLIC_AUTH_BASE_URL` is undefined)
@@ -19,64 +17,38 @@ To run this example, you may either:
### Using this example as a template
1. Create a new Jazz project, and use this example as a template.
```sh
npx create-jazz-app@latest betterauth-app --example betterauth
```
2. Navigate to the new project and install dependencies.
2. Navigate to the new project and start the development server.
```sh
cd betterauth-app
pnpm install
```
3. Create a .env file (don't forget to set your [BETTER_AUTH_SECRET](https://www.better-auth.com/docs/installation#set-environment-variables)!)
```sh
mv .env.example .env
```
4. Start the development server
```sh
pnpm dev
```
https://www.better-auth.com/docs/installation#set-environment-variables
### Using the monorepo
This requires `pnpm` to be installed, see [https://pnpm.io/installation](https://pnpm.io/installation).
Clone the jazz repository.
```bash
git clone https://github.com/garden-co/jazz.git
```
Install and build dependencies.
```bash
pnpm i && npx turbo build
```
Go to the example directory.
```bash
cd jazz/examples/betterauth/
```
Create a .env file (don't forget to set your [BETTER_AUTH_SECRET](https://www.better-auth.com/docs/installation#set-environment-variables)!)
```sh
mv .env.example .env
```
Start the dev server.
```bash
pnpm dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

View File

@@ -1,48 +1,51 @@
{
"name": "betterauth",
"private": true,
"type": "module",
"scripts": {
"dev": "next dev --turbopack",
"build": "next build",
"start": "next start",
"lint": "next lint",
"format-and-lint": "biome check .",
"format-and-lint:fix": "biome check . --write",
"test:e2e": "playwright test",
"test:e2e:ui": "playwright test --ui",
"email": "email dev --dir src/components/emails"
},
"dependencies": {
"@icons-pack/react-simple-icons": "^12.8.0",
"@radix-ui/react-label": "^2.1.6",
"@radix-ui/react-slot": "^1.2.2",
"better-auth": "^1.2.4",
"better-sqlite3": "^11.9.1",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"jazz-react-auth-betterauth": "workspace:*",
"jazz-betterauth-client-plugin": "workspace:*",
"jazz-betterauth-server-plugin": "workspace:*",
"jazz-tools": "workspace:*",
"lucide-react": "^0.510.0",
"next": "15.3.2",
"react": "catalog:react",
"react-dom": "catalog:react",
"sonner": "^2.0.3",
"tailwind-merge": "^3.3.0",
"tw-animate-css": "^1.2.5"
},
"devDependencies": {
"@biomejs/biome": "catalog:default",
"@playwright/test": "^1.50.1",
"@tailwindcss/postcss": "^4",
"@types/better-sqlite3": "^7.6.12",
"@types/node": "^20",
"@types/react": "catalog:react",
"@types/react-dom": "catalog:react",
"react-email": "^4.0.11",
"tailwindcss": "^4",
"typescript": "catalog:default"
}
"name": "betterauth",
"version": "0.1.4",
"private": true,
"type": "module",
"scripts": {
"dev": "next dev --turbopack",
"build": "next build",
"start": "next start",
"lint": "next lint",
"format-and-lint": "biome check .",
"format-and-lint:fix": "biome check . --write",
"test:e2e": "playwright test",
"test:e2e:ui": "playwright test --ui",
"email": "email dev --dir src/components/emails"
},
"dependencies": {
"@icons-pack/react-simple-icons": "^12.8.0",
"@radix-ui/react-label": "^2.1.6",
"@radix-ui/react-slot": "^1.2.2",
"better-auth": "^1.2.4",
"better-sqlite3": "^11.9.1",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"jazz-betterauth-client-plugin": "workspace:*",
"jazz-betterauth-server-plugin": "workspace:*",
"jazz-inspector": "workspace:*",
"jazz-react": "workspace:*",
"jazz-react-auth-betterauth": "workspace:*",
"jazz-tools": "workspace:*",
"lucide-react": "^0.510.0",
"next": "15.3.2",
"react": "^18.0.0",
"react-dom": "^18.0.0",
"sonner": "^2.0.3",
"tailwind-merge": "^3.3.0",
"tw-animate-css": "^1.2.5"
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@playwright/test": "^1.50.1",
"@tailwindcss/postcss": "^4",
"@types/better-sqlite3": "^7.6.12",
"@types/node": "^20",
"@types/react": "^18",
"@types/react-dom": "^18",
"react-email": "^4.0.11",
"tailwindcss": "^4",
"typescript": "^5"
}
}

View File

@@ -1,8 +1,7 @@
"use client";
import { Button } from "@/components/ui/button";
import { Account } from "jazz-tools";
import { useAccount } from "jazz-tools/react";
import { useAccount } from "jazz-react";
import {
AppWindowMacIcon,
FileTextIcon,
@@ -12,7 +11,7 @@ import {
import Image from "next/image";
export default function Home() {
const { me } = useAccount(Account, { resolve: { profile: {} } });
const { me } = useAccount({ resolve: { profile: {} } });
if (!me) {
return null;

View File

@@ -1,21 +1,21 @@
"use client";
import { JazzProvider } from "jazz-react";
import { AuthProvider } from "jazz-react-auth-betterauth";
import { JazzReactProvider } from "jazz-tools/react";
import { type ReactNode, lazy } from "react";
const JazzDevTools =
process.env.NODE_ENV === "production"
? () => null
: lazy(() =>
import("jazz-tools/inspector").then((res) => ({
import("jazz-inspector").then((res) => ({
default: res.JazzInspector,
})),
);
export function JazzAndAuth({ children }: { children: ReactNode }) {
return (
<JazzReactProvider
<JazzProvider
sync={{
peer: "wss://cloud.jazz.tools/?key=betterauth-example@garden.co",
}}
@@ -28,6 +28,6 @@ export function JazzAndAuth({ children }: { children: ReactNode }) {
{children}
</AuthProvider>
<JazzDevTools />
</JazzReactProvider>
</JazzProvider>
);
}

View File

@@ -1,16 +1,15 @@
"use client";
import { Button } from "@/components/ui/button";
import { useAccount, useIsAuthenticated } from "jazz-react";
import { useAuth } from "jazz-react-auth-betterauth";
import { Account } from "jazz-tools";
import { useAccount, useIsAuthenticated } from "jazz-tools/react";
import Image from "next/image";
import Link from "next/link";
import { useCallback } from "react";
export function Navbar() {
const { authClient } = useAuth();
const { logOut } = useAccount(Account, { resolve: { profile: {} } });
const { logOut } = useAccount({ resolve: { profile: {} } });
const isAuthenticated = useIsAuthenticated();

View File

@@ -1,8 +1,8 @@
"use client";
import { Button } from "@/components/ui/button";
import { useAccount, useIsAuthenticated } from "jazz-react";
import { useAuth } from "jazz-react-auth-betterauth";
import { useAccount, useIsAuthenticated } from "jazz-tools/react";
import Link from "next/link";
import { useRouter } from "next/navigation";
import { toast } from "sonner";

examples/chat-rn-expo-clerk/.gitignore (new file, 17 lines)
View File

@@ -0,0 +1,17 @@
node_modules/
.expo/
dist/
npm-debug.*
*.jks
*.p8
*.p12
*.key
*.mobileprovision
*.orig.*
web-build/
# macOS
.DS_Store
ios
android

File diff suppressed because it is too large

View File

@@ -0,0 +1,24 @@
# 🎷 Jazz + Expo + `expo-router` + Clerk Auth
## 🚀 How to Run
### 1. Inside the Workspace Root
First, install dependencies and build the project:
```bash
pnpm i
pnpm run build
```
### 2. Inside the `examples/chat-rn-expo-clerk` Directory
Next, navigate to the specific example project and run the following commands:
```bash
pnpm expo prebuild
pnpx pod-install
pnpm expo run:ios
```
This will set up and launch the app on iOS. For Android, you can replace the last command with `pnpm expo run:android`.
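Note: the example's root layout throws at startup if `EXPO_PUBLIC_CLERK_PUBLISHABLE_KEY` is not set, so put your Clerk publishable key in a `.env` file (or export it in your shell) before running the commands above.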

View File

@@ -0,0 +1,45 @@
{
"expo": {
"name": "jazz-chat-rn-expo-clerk",
"scheme": "jazz-chat-rn-expo-clerk",
"slug": "jazz-chat-rn-expo-clerk",
"version": "1.0.0",
"orientation": "portrait",
"icon": "./assets/images/icon.png",
"userInterfaceStyle": "light",
"splash": {
"image": "./assets/images/splash.png",
"resizeMode": "contain",
"backgroundColor": "#ffffff"
},
"ios": {
"supportsTablet": true,
"bundleIdentifier": "com.jazz.chatrnclerk"
},
"android": {
"adaptiveIcon": {
"foregroundImage": "./assets/images/adaptive-icon.png",
"backgroundColor": "#ffffff"
},
"package": "com.jazz.chatrnclerk"
},
"newArchEnabled": true,
"plugins": [
"expo-secure-store",
"expo-font",
"expo-router",
"expo-sqlite",
[
"expo-image-picker",
{
"photosPermission": "The app accesses your photos to let you share them with your friends."
}
]
],
"extra": {
"eas": {
"projectId": "ca3d46e5-a10a-47ec-9d77-3b841e1c62d4"
}
}
}
}

View File

@@ -0,0 +1,15 @@
import { Redirect, Stack } from "expo-router";
import { useIsAuthenticated } from "jazz-expo";
import React from "react";
export default function HomeLayout() {
const isAuthenticated = useIsAuthenticated();
if (isAuthenticated) {
return <Redirect href={"/chat"} />;
}
return (
<Stack screenOptions={{ headerShown: false, headerBackVisible: true }} />
);
}

View File

@@ -0,0 +1,33 @@
import { SignedOut } from "@clerk/clerk-expo";
import { Link } from "expo-router";
import React from "react";
import { Text, View } from "react-native";
export default function HomePage() {
return (
<View className="flex-1 justify-center items-center bg-gray-100 p-6">
<SignedOut>
<View className="bg-white p-6 rounded-lg shadow-lg w-11/12 max-w-md">
<Text className="text-2xl font-bold text-center text-gray-900 mb-4">
Jazz 🤝 Clerk 🤝 Expo
</Text>
<Link href="/sign-in" className="mb-4">
<Text className="text-center text-blue-600 underline text-lg">
Sign In
</Text>
</Link>
<Link href="/sign-in-oauth" className="mb-4">
<Text className="text-center text-blue-600 underline text-lg">
Sign In OAuth
</Text>
</Link>
<Link href="/sign-up">
<Text className="text-center text-blue-600 underline text-lg">
Sign Up
</Text>
</Link>
</View>
</SignedOut>
</View>
);
}

View File

@@ -0,0 +1,20 @@
import { Redirect, Stack } from "expo-router";
import { useIsAuthenticated } from "jazz-expo";
export default function UnAuthenticatedLayout() {
const isAuthenticated = useIsAuthenticated();
if (isAuthenticated) {
return <Redirect href={"/chat"} />;
}
return (
<Stack
screenOptions={{
headerShown: true,
headerBackVisible: true,
headerTitle: "",
}}
/>
);
}

View File

@@ -0,0 +1,65 @@
import { useOAuth } from "@clerk/clerk-expo";
import * as Linking from "expo-linking";
import { Link } from "expo-router";
import * as WebBrowser from "expo-web-browser";
import React from "react";
import { Text, TouchableOpacity, View } from "react-native";
export const useWarmUpBrowser = () => {
React.useEffect(() => {
// Warm up the android browser to improve UX
// https://docs.expo.dev/guides/authentication/#improving-user-experience
void WebBrowser.warmUpAsync();
return () => {
void WebBrowser.coolDownAsync();
};
}, []);
};
WebBrowser.maybeCompleteAuthSession();
const SignInWithOAuth = () => {
useWarmUpBrowser();
const { startOAuthFlow } = useOAuth({ strategy: "oauth_google" });
const onPress = React.useCallback(async () => {
try {
const { createdSessionId, signIn, signUp, setActive } =
await startOAuthFlow({
redirectUrl: Linking.createURL("/", {
scheme: "jazz-chat-rn-expo-clerk",
}),
});
if (createdSessionId) {
setActive!({ session: createdSessionId });
} else {
// Use signIn or signUp for next steps such as MFA
}
} catch (err) {
console.error("OAuth error", err);
}
}, []);
return (
<View className="flex-1 justify-center items-center bg-gray-50 p-6">
<View className="bg-white w-11/12 max-w-md p-8 rounded-lg shadow-lg items-center">
<TouchableOpacity
onPress={onPress}
className="w-full bg-red-500 py-3 rounded-lg flex items-center justify-center"
>
<Text className="text-white text-lg font-semibold">
Sign in with Google
</Text>
</TouchableOpacity>
<Link href="/" className="mt-4">
<Text className="text-blue-600 text-lg font-semibold underline mb-6">
Back to Home
</Text>
</Link>
</View>
</View>
);
};
export default SignInWithOAuth;

View File

@@ -0,0 +1,79 @@
import { useSignIn } from "@clerk/clerk-expo";
import { Link } from "expo-router";
import React from "react";
import { Text, TextInput, TouchableOpacity, View } from "react-native";
export default function SignInPage() {
const { signIn, setActive, isLoaded } = useSignIn();
const [emailAddress, setEmailAddress] = React.useState("");
const [password, setPassword] = React.useState("");
const [errorMessage, setErrorMessage] = React.useState("");
const onSignInPress = React.useCallback(async () => {
if (!isLoaded) {
return;
}
setErrorMessage("");
try {
const signInAttempt = await signIn.create({
identifier: emailAddress,
password,
});
if (signInAttempt.status === "complete") {
await setActive({ session: signInAttempt.createdSessionId });
} else {
console.error(JSON.stringify(signInAttempt, null, 2));
setErrorMessage("Invalid credentials. Please try again.");
}
} catch (err: any) {
console.error(JSON.stringify(err, null, 2));
if (err.errors && err.errors[0]?.message) {
setErrorMessage(err.errors[0].message);
} else {
setErrorMessage("An unexpected error occurred. Please try again.");
}
}
}, [isLoaded, emailAddress, password]);
return (
<View className="flex-1 justify-center items-center bg-gray-50 p-6">
<View className="bg-white w-11/12 max-w-md p-8 rounded-lg shadow-md">
<Text className="text-3xl font-bold text-center text-gray-800 mb-6">
Sign In
</Text>
{errorMessage ? (
<Text className="text-red-500 text-center mb-4">{errorMessage}</Text>
) : null}
<TextInput
autoCapitalize="none"
value={emailAddress}
placeholder="Email..."
onChangeText={(emailAddress) => setEmailAddress(emailAddress)}
className="w-full h-12 mb-4 px-4 bg-gray-100 border border-gray-300 rounded-lg focus:border-blue-500 focus:outline-none"
/>
<TextInput
value={password}
placeholder="Password..."
secureTextEntry={true}
onChangeText={(password) => setPassword(password)}
className="w-full h-12 mb-6 px-4 bg-gray-100 border border-gray-300 rounded-lg focus:border-blue-500 focus:outline-none"
/>
<TouchableOpacity
onPress={onSignInPress}
className="w-full h-12 bg-blue-600 rounded-lg flex items-center justify-center"
>
<Text className="text-white text-lg font-semibold">Sign In</Text>
</TouchableOpacity>
<View className="flex-row items-center justify-center mt-4">
<Text className="text-gray-600">Don't have an account?</Text>
<Link href="/sign-up">
<Text className="text-blue-500 ml-2 font-semibold">Sign up</Text>
</Link>
</View>
</View>
</View>
);
}

View File

@@ -0,0 +1,120 @@
import { useSignUp } from "@clerk/clerk-expo";
import { useNavigation } from "@react-navigation/native";
import * as React from "react";
import { Text, TextInput, TouchableOpacity, View } from "react-native";
export default function SignUpPage() {
const { isLoaded, signUp, setActive } = useSignUp();
const [emailAddress, setEmailAddress] = React.useState("");
const [password, setPassword] = React.useState("");
const [pendingVerification, setPendingVerification] = React.useState(false);
const [code, setCode] = React.useState("");
const [errorMessage, setErrorMessage] = React.useState("");
const navigation = useNavigation();
const onSignUpPress = async () => {
if (!isLoaded) return;
setErrorMessage("");
try {
await signUp.create({
emailAddress,
password,
});
await signUp.prepareEmailAddressVerification({
strategy: "email_code",
});
setPendingVerification(true);
} catch (err: any) {
console.error(JSON.stringify(err, null, 2));
if (err.errors && err.errors[0]?.message) {
setErrorMessage(err.errors[0].message);
} else {
setErrorMessage("An unexpected error occurred. Please try again.");
}
}
};
const onPressVerify = async () => {
if (!isLoaded) return;
setErrorMessage("");
try {
const completeSignUp = await signUp.attemptEmailAddressVerification({
code,
});
if (completeSignUp.status === "complete") {
await setActive({ session: completeSignUp.createdSessionId });
} else {
console.error(JSON.stringify(completeSignUp, null, 2));
setErrorMessage("Failed to verify. Please check your code.");
}
} catch (err: any) {
console.error(JSON.stringify(err, null, 2));
setErrorMessage("Invalid verification code. Please try again.");
}
};
return (
<View className="flex-1 justify-center items-center bg-gray-50 p-6">
<View className="bg-white w-11/12 max-w-md p-8 rounded-lg shadow-lg">
<Text className="text-3xl font-bold text-center text-gray-800 mb-6">
{pendingVerification ? "Verify Email" : "Sign Up"}
</Text>
{errorMessage ? (
<Text className="text-red-500 text-center mb-4">{errorMessage}</Text>
) : null}
{!pendingVerification && (
<>
<TextInput
autoCapitalize="none"
value={emailAddress}
placeholder="Email..."
onChangeText={(email) => setEmailAddress(email)}
className="w-full h-12 mb-4 px-4 bg-gray-100 border border-gray-300 rounded-lg focus:border-blue-500 focus:outline-none"
/>
<TextInput
value={password}
placeholder="Password..."
secureTextEntry={true}
onChangeText={(password) => setPassword(password)}
className="w-full h-12 mb-6 px-4 bg-gray-100 border border-gray-300 rounded-lg focus:border-blue-500 focus:outline-none"
/>
<TouchableOpacity
onPress={onSignUpPress}
className="w-full h-12 bg-blue-600 rounded-lg flex justify-center items-center mb-4"
>
<Text className="text-white text-lg font-semibold">Sign Up</Text>
</TouchableOpacity>
</>
)}
{pendingVerification && (
<>
<TextInput
value={code}
placeholder="Verification Code..."
onChangeText={(code) => setCode(code)}
className="w-full h-12 mb-4 px-4 bg-gray-100 border border-gray-300 rounded-lg focus:border-blue-500 focus:outline-none"
/>
<TouchableOpacity
onPress={onPressVerify}
className="w-full h-12 bg-green-600 rounded-lg flex justify-center items-center mb-4"
>
<Text className="text-white text-lg font-semibold">
Verify Email
</Text>
</TouchableOpacity>
</>
)}
</View>
</View>
);
}

View File

@@ -0,0 +1,42 @@
import { ScrollViewStyleReset } from "expo-router/html";
import { type PropsWithChildren } from "react";
/**
* This file is web-only and used to configure the root HTML for every web page during static rendering.
* The contents of this function only run in Node.js environments and do not have access to the DOM or browser APIs.
*/
export default function Root({ children }: PropsWithChildren) {
return (
<html lang="en">
<head>
<meta charSet="utf-8" />
<meta httpEquiv="X-UA-Compatible" content="IE=edge" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, shrink-to-fit=no"
/>
{/*
Disable body scrolling on web. This makes ScrollView components work closer to how they do on native.
However, body scrolling is often nice to have for mobile web. If you want to enable it, remove this line.
*/}
<ScrollViewStyleReset />
{/* Using raw CSS styles as an escape-hatch to ensure the background color never flickers in dark-mode. */}
<style dangerouslySetInnerHTML={{ __html: responsiveBackground }} />
{/* Add any additional <head> elements that you want globally available on web... */}
</head>
<body>{children}</body>
</html>
);
}
const responsiveBackground = `
body {
background-color: #fff;
}
@media (prefers-color-scheme: dark) {
body {
background-color: #000;
}
}`;

View File

@@ -0,0 +1,29 @@
import { Link, Stack } from "expo-router";
import { StyleSheet, Text, View } from "react-native";
export default function NotFoundScreen() {
return (
<>
<Stack.Screen options={{ title: "Oops!" }} />
<View style={styles.container}>
<Text>This screen doesn't exist.</Text>
<Link href="/" style={styles.link}>
<Text>Go to home screen!</Text>
</Link>
</View>
</>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: "center",
justifyContent: "center",
padding: 20,
},
link: {
marginTop: 15,
paddingVertical: 15,
},
});

View File

@@ -0,0 +1,71 @@
import "../global.css";
import { ClerkLoaded, ClerkProvider } from "@clerk/clerk-expo";
import { secureStore } from "@clerk/clerk-expo/secure-store";
import { useFonts } from "expo-font";
import { Slot, useRouter, useSegments } from "expo-router";
import * as SplashScreen from "expo-splash-screen";
import { useIsAuthenticated, useJazzContext } from "jazz-expo";
import React, { useEffect } from "react";
import { tokenCache } from "../cache";
import { JazzAndAuth } from "../src/auth-context";
SplashScreen.preventAutoHideAsync();
function InitialLayout() {
const isAuthenticated = useIsAuthenticated();
const segments = useSegments();
const router = useRouter();
useEffect(() => {
const inAuthGroup = segments[0] === "(auth)";
if (isAuthenticated && inAuthGroup) {
router.replace("/chat");
} else if (!isAuthenticated && !inAuthGroup) {
router.replace("/");
}
SplashScreen.hideAsync();
}, [isAuthenticated, segments, router]);
return <Slot />;
}
export default function RootLayout() {
const [fontsLoaded] = useFonts({
SpaceMono: require("../assets/fonts/SpaceMono-Regular.ttf"),
});
const publishableKey = process.env.EXPO_PUBLIC_CLERK_PUBLISHABLE_KEY;
if (!publishableKey) {
throw new Error(
"Missing Publishable Key. Please set EXPO_PUBLIC_CLERK_PUBLISHABLE_KEY in your .env",
);
}
useEffect(() => {
if (!fontsLoaded) {
SplashScreen.preventAutoHideAsync();
}
}, [fontsLoaded]);
if (!fontsLoaded) {
return null;
}
return (
<ClerkProvider
tokenCache={tokenCache}
publishableKey={publishableKey}
__experimental_resourceCache={secureStore}
>
<ClerkLoaded>
<JazzAndAuth>
<InitialLayout />
</JazzAndAuth>
</ClerkLoaded>
</ClerkProvider>
);
}

View File

@@ -0,0 +1,234 @@
import { Chat, Message } from "@/src/schema";
import { useNavigation } from "@react-navigation/native";
import clsx from "clsx";
import * as Clipboard from "expo-clipboard";
import * as ImagePicker from "expo-image-picker";
import { useLocalSearchParams } from "expo-router";
import { ProgressiveImg, useAccount, useCoState } from "jazz-expo";
import { createImage } from "jazz-react-native-media-images";
import { CoPlainText, Group, ID } from "jazz-tools";
import React, { useEffect, useLayoutEffect, useState } from "react";
import {
SafeAreaView,
View,
Text,
Alert,
TouchableOpacity,
FlatList,
KeyboardAvoidingView,
TextInput,
Button,
Image,
ActivityIndicator,
} from "react-native";
export default function Conversation() {
const { chatId } = useLocalSearchParams();
const { me } = useAccount();
const [chat, setChat] = useState<Chat>();
const [message, setMessage] = useState("");
const loadedChat = useCoState(Chat, chat?.id, { resolve: { $each: true } });
const navigation = useNavigation();
const [isUploading, setIsUploading] = useState(false);
useEffect(() => {
if (chat) return;
if (chatId === "new") {
createChat();
} else {
loadChat(chatId as ID<Chat>);
}
}, [chat]);
// Effect to dynamically set header options
useLayoutEffect(() => {
navigation.setOptions({
headerTitle: "Chat",
headerRight: () =>
chat ? (
<Button
onPress={() => {
if (chat?.id) {
Clipboard.setStringAsync(
`https://chat.jazz.tools/#/chat/${chat.id}`,
);
Alert.alert("Copied to clipboard", `Chat ID: ${chat.id}`);
}
}}
title="Share"
/>
) : null,
});
}, [navigation, chat]);
const createChat = () => {
const group = Group.create({ owner: me });
group.addMember("everyone", "writer");
const chat = Chat.create([], { owner: group });
setChat(chat);
};
const loadChat = async (chatId: ID<Chat>) => {
try {
const chat = await Chat.load(chatId);
if (chat) setChat(chat);
} catch (error) {
console.log("Error loading chat", error);
Alert.alert("Error", `Error loading chat: ${error}`);
}
};
const sendMessage = () => {
if (!chat) return;
if (message.trim()) {
chat.push(
Message.create(
{ text: CoPlainText.create(message, chat._owner) },
chat._owner,
),
);
setMessage("");
}
};
const handleImageUpload = async () => {
try {
const result = await ImagePicker.launchImageLibraryAsync({
mediaTypes: ImagePicker.MediaTypeOptions.Images,
base64: true,
quality: 0.7,
});
if (!result.canceled && result.assets[0].base64 && chat) {
setIsUploading(true);
const base64Uri = `data:image/jpeg;base64,${result.assets[0].base64}`;
const image = await createImage(base64Uri, {
owner: chat._owner,
maxSize: 2048,
});
chat.push(
Message.create(
{ text: CoPlainText.create("", chat._owner), image },
chat._owner,
),
);
}
} catch (error) {
Alert.alert("Error", "Failed to upload image");
} finally {
setIsUploading(false);
}
};
const renderMessageItem = ({ item }: { item: Message }) => {
const isMe = item._edits.text.by?.isMe;
return (
<View
className={clsx(
`rounded-xl px-3 py-2 max-w-[75%] my-1`,
isMe ? `bg-blue-500 self-end` : `bg-gray-200 self-start`,
)}
>
{!isMe ? (
<Text
className={clsx(
`text-xs text-gray-500 mb-1`,
isMe ? "text-right" : "text-left",
)}
>
{item._edits.text.by?.profile?.name}
</Text>
) : null}
<View
className={clsx(
"flex relative items-end justify-between",
isMe ? "flex-row" : "flex-row",
)}
>
{item.image && (
<ProgressiveImg image={item.image} maxWidth={1024}>
{({ src, res, originalSize }) => (
<Image
source={{ uri: src }}
className="w-48 h-48 rounded-lg mb-2"
resizeMode="cover"
/>
)}
</ProgressiveImg>
)}
{item.text && (
<Text
className={clsx(
!isMe ? "text-black" : "text-gray-200",
`text-md max-w-[85%]`,
)}
>
{item.text}
</Text>
)}
<Text
className={clsx(
"text-[10px] text-right ml-2",
!isMe ? "mt-2 text-gray-500" : "mt-1 text-gray-200",
)}
>
{item._edits.text.madeAt?.getHours().toString().padStart(2, "0")}:
{item._edits.text.madeAt?.getMinutes().toString().padStart(2, "0")}
</Text>
</View>
</View>
);
};
return (
<View className="flex-1 bg-gray-50">
<FlatList
contentContainerStyle={{
flexGrow: 1,
paddingVertical: 10,
paddingHorizontal: 8,
}}
className="flex"
data={loadedChat}
keyExtractor={(item) => item.id}
renderItem={renderMessageItem}
/>
<KeyboardAvoidingView
keyboardVerticalOffset={110}
behavior="padding"
className="p-3 bg-white border-t border-gray-300"
>
<SafeAreaView className="flex-row items-center gap-2">
<TouchableOpacity
onPress={handleImageUpload}
disabled={isUploading}
className="h-10 w-10 items-center justify-center"
>
{isUploading ? (
<ActivityIndicator size="small" color="#0000ff" />
) : (
<Text className="text-2xl">🖼</Text>
)}
</TouchableOpacity>
<TextInput
className="flex-1 rounded-full h-10 px-4 bg-gray-100 border border-gray-300 focus:border-blue-500 focus:bg-white"
value={message}
onChangeText={setMessage}
placeholder="Type a message..."
textAlignVertical="center"
onSubmitEditing={sendMessage}
/>
<TouchableOpacity
onPress={sendMessage}
className="bg-blue-500 rounded-full h-10 w-10 items-center justify-center"
>
<Text className="text-white text-xl"></Text>
</TouchableOpacity>
</SafeAreaView>
</KeyboardAvoidingView>
</View>
);
}

View File

@@ -0,0 +1,14 @@
import { Stack } from "expo-router";
import React from "react";
export default function ChatLayout() {
return (
<Stack
screenOptions={{
headerShown: true,
headerBackVisible: true,
headerTitle: "",
}}
/>
);
}

View File

@@ -0,0 +1,90 @@
import { useNavigation } from "@react-navigation/native";
import { useRouter } from "expo-router";
import { ID } from "jazz-tools";
import React, { useLayoutEffect } from "react";
import {
Button,
Text,
TouchableOpacity,
View,
Alert,
} from "react-native";
import { useUser } from "@clerk/clerk-expo";
import { useAccount } from "jazz-expo";
import { Chat } from "../../src/schema";
export default function ChatScreen() {
const { logOut } = useAccount();
const router = useRouter();
const navigation = useNavigation();
const { user } = useUser();
function handleLogOut() {
logOut();
router.navigate("/");
}
useLayoutEffect(() => {
navigation.setOptions({
headerTitle: "Chat",
headerRight: () => <Button onPress={handleLogOut} title="Logout" />,
});
}, [navigation]);
const loadChat = async (chatId: ID<Chat> | "new") => {
router.navigate(`/chat/${chatId}`);
};
const joinChat = () => {
Alert.prompt(
"Join Chat",
"Enter the Chat ID (example: co_zBGEHYvRfGuT2YSBraY3njGjnde)",
[
{
text: "Cancel",
style: "cancel",
},
{
text: "Join",
onPress: (chatId) => {
if (chatId) {
loadChat(chatId as ID<Chat>);
} else {
Alert.alert("Error", "Chat ID cannot be empty.");
}
},
},
],
"plain-text",
);
};
return (
<View className="flex-1 bg-gray-50">
<View className="flex-1 justify-center items-center px-6">
<View className="w-full max-w-sm bg-white p-8 rounded-lg shadow-lg">
<Text className="text-xl font-semibold text-gray-800">
Welcome, {user?.emailAddresses[0].emailAddress}
</Text>
<TouchableOpacity
onPress={() => loadChat("new")}
className="w-full bg-blue-600 py-4 rounded-md mb-4 mt-4"
>
<Text className="text-white text-lg font-semibold text-center">
Start New Chat
</Text>
</TouchableOpacity>
<TouchableOpacity
onPress={joinChat}
className="w-full bg-green-500 py-4 rounded-md"
>
<Text className="text-white text-lg font-semibold text-center">
Join Chat
</Text>
</TouchableOpacity>
</View>
</View>
</View>
);
}

View File

Binary image assets for this example were added or updated; image previews are not shown (file sizes range from roughly 1.4 KiB to 313 KiB).

View File

@@ -0,0 +1,9 @@
module.exports = function (api) {
api.cache(true);
return {
presets: [
["babel-preset-expo", { jsxImportSource: "nativewind" }],
"nativewind/babel",
],
};
};

View File

@@ -0,0 +1,39 @@
import * as SecureStore from "expo-secure-store";
import { Platform } from "react-native";
export interface TokenCache {
getToken: (key: string) => Promise<string | undefined | null>;
saveToken: (key: string, token: string) => Promise<void>;
clearToken: (key: string) => void;
}
const createTokenCache = (): TokenCache => {
return {
getToken: async (key: string) => {
try {
const item = await SecureStore.getItemAsync(key);
if (item) {
console.log(`${key} was used 🔐 \n`);
} else {
console.log("No values stored under key: " + key);
}
return item;
} catch (error) {
console.error("secure store get item error: ", error);
await SecureStore.deleteItemAsync(key);
return null;
}
},
saveToken: (key: string, token: string) => {
return SecureStore.setItemAsync(key, token);
},
clearToken: (key: string) => {
return SecureStore.deleteItemAsync(key);
},
};
};
// SecureStore is not supported on the web
// https://github.com/expo/expo/issues/7744#issuecomment-611093485
export const tokenCache =
Platform.OS !== "web" ? createTokenCache() : undefined;

View File

@@ -0,0 +1,27 @@
{
"cli": {
"version": ">= 12.5.1",
"appVersionSource": "remote"
},
"build": {
"development": {
"developmentClient": true,
"distribution": "internal"
},
"ios-simulator": {
"extends": "development",
"ios": {
"simulator": true
}
},
"preview": {
"distribution": "internal"
},
"production": {
"autoIncrement": true
}
},
"submit": {
"production": {}
}
}

View File

@@ -0,0 +1,2 @@
import "./polyfills";
import "expo-router/entry";

View File

@@ -0,0 +1,35 @@
// Learn more https://docs.expo.dev/guides/monorepos
const { getDefaultConfig } = require("expo/metro-config");
const { withNativeWind } = require("nativewind/metro");
const { FileStore } = require("metro-cache");
const path = require("path");
// eslint-disable-next-line no-undef
const projectRoot = __dirname;
const workspaceRoot = path.resolve(projectRoot, "../..");
const config = getDefaultConfig(projectRoot);
// Since we are using pnpm, we have to setup the monorepo manually for Metro
// #1 - Watch all files in the monorepo
config.watchFolders = [workspaceRoot];
// #2 - Try resolving with project modules first, then workspace modules
config.resolver.nodeModulesPaths = [
path.resolve(projectRoot, "node_modules"),
path.resolve(workspaceRoot, "node_modules"),
];
config.resolver.sourceExts = ["mjs", "js", "json", "ts", "tsx"];
config.resolver.requireCycleIgnorePatterns = [
/(^|\/|\\)node_modules($|\/|\\)/,
/(^|\/|\\)packages($|\/|\\)/,
];
// Use turborepo to restore the cache when possible
config.cacheStores = [
new FileStore({
root: path.join(projectRoot, "node_modules", ".cache", "metro"),
}),
];
// module.exports = config;
module.exports = withNativeWind(config, { input: "./global.css" });

View File

@@ -0,0 +1 @@
/// <reference types="nativewind/types" />

View File

@@ -0,0 +1,67 @@
{
"name": "chat-rn-expo-clerk",
"main": "index.js",
"version": "1.0.123",
"scripts": {
"build": "expo export -p ios",
"start": "expo start",
"format-and-lint": "biome check .",
"format-and-lint:fix": "biome check . --write",
"android": "expo run:android",
"ios": "expo prebuild && pnpx pod-install && expo run:ios",
"web": "expo start --web",
"run:ios": "pnpm expo prebuild && npx pod-install && pnpm expo run:ios"
},
"dependencies": {
"@azure/core-asynciterator-polyfill": "^1.0.2",
"@bacons/text-decoder": "0.0.0",
"@bam.tech/react-native-image-resizer": "^3.0.11",
"@clerk/clerk-expo": "^2.2.21",
"@craftzdog/react-native-buffer": "6.0.5",
"@expo/vector-icons": "^14.0.2",
"@react-native-community/netinfo": "11.4.1",
"@react-navigation/native": "7.0.19",
"@react-navigation/native-stack": "7.2.1",
"clsx": "^2.0.0",
"expo": "^52.0.42",
"expo-build-properties": "~0.13.1",
"expo-clipboard": "~7.0.0",
"expo-constants": "~17.0.8",
"expo-crypto": "~14.0.2",
"expo-dev-client": "~5.0.16",
"expo-file-system": "^18.0.4",
"expo-font": "~13.0.1",
"expo-image-picker": "~16.0.6",
"expo-linking": "~7.0.5",
"expo-router": "~4.0.19",
"expo-secure-store": "~14.0.0",
"expo-splash-screen": "~0.29.22",
"expo-sqlite": "15.1.3",
"expo-status-bar": "~2.0.1",
"expo-web-browser": "~14.0.1",
"jazz-expo": "workspace:*",
"jazz-react-native-media-images": "workspace:*",
"jazz-tools": "workspace:*",
"nativewind": "^4.1.21",
"react": "18.3.1",
"react-dom": "18.3.1",
"react-native": "0.76.7",
"react-native-gesture-handler": "~2.20.2",
"react-native-get-random-values": "^1.11.0",
"react-native-reanimated": "~3.16.3",
"react-native-safe-area-context": "4.12.0",
"react-native-screens": "4.4.0",
"react-native-url-polyfill": "^2.0.0",
"react-native-web": "~0.19.13",
"readable-stream": "4.7.0"
},
"devDependencies": {
"@babel/core": "^7.25.2",
"@types/react": "~18.3.12",
"@types/react-test-renderer": "^19.0.0",
"react-test-renderer": "18.3.1",
"tailwindcss": "^3.4.17",
"typescript": "5.6.2"
},
"private": true
}

View File

@@ -3,6 +3,9 @@
// @ts-expect-error - @types/react-native doesn't cover this file
import { polyfillGlobal } from "react-native/Libraries/Utilities/PolyfillFunctions";
import { Buffer } from "@craftzdog/react-native-buffer";
polyfillGlobal("Buffer", () => Buffer);
// @ts-expect-error - @types/readable-stream doesn't have ReadableStream type
import { ReadableStream } from "readable-stream";
polyfillGlobal("ReadableStream", () => ReadableStream);

View File

@@ -0,0 +1 @@
export const apiKey = "chat-rn-expo-clerk-example-jazz@garden.co";

View File

@@ -0,0 +1,19 @@
import { useClerk } from "@clerk/clerk-expo";
import { JazzProviderWithClerk } from "jazz-expo/auth/clerk";
import React, { PropsWithChildren } from "react";
import { apiKey } from "./apiKey";
export function JazzAndAuth({ children }: PropsWithChildren) {
const clerk = useClerk();
return (
<JazzProviderWithClerk
clerk={clerk}
sync={{
peer: `wss://cloud.jazz.tools/?key=${apiKey}`,
}}
>
{children}
</JazzProviderWithClerk>
);
}

View File

@@ -0,0 +1,8 @@
import { CoList, CoMap, CoPlainText, ImageDefinition, co } from "jazz-tools";
export class Message extends CoMap {
text = co.ref(CoPlainText);
image = co.optional.ref(ImageDefinition);
}
export class Chat extends CoList.Of(co.ref(Message)) {}

View File

@@ -0,0 +1,14 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
// NOTE: Update this to include the paths to all of your component files.
content: [
"./app/**/*.{js,jsx,ts,tsx}",
"./components/**/*.{js,jsx,ts,tsx}",
"./src/**/*.{js,jsx,ts,tsx}",
],
presets: [require("nativewind/preset")],
theme: {
extend: {},
},
plugins: [],
};

Some files were not shown because too many files have changed in this diff