Merge branch 'beta' into fix/lexical-localization

Alessio Gravili
2024-04-23 15:20:56 -04:00
committed by GitHub
411 changed files with 7941 additions and 1785 deletions

.github/CODEOWNERS

@@ -1,41 +1,33 @@
# Order matters. The last matching pattern takes precedence.
-### Core ###
-/packages/payload/src/uploads/ @denolfe
-/packages/payload/src/admin/ @jmikrut @jacobsfletch @JarrodMFlesch
+### Package Exports ###
+/**/exports/ @denolfe @jmikrut
### Adapters ###
-/packages/db-*/ @denolfe @jmikrut @DanRibbens
-/packages/richtext-*/ @denolfe @jmikrut @DanRibbens @AlessioGr
+/packages/richtext-*/ @AlessioGr
### Plugins ###
-/packages/plugin-*/ @denolfe @jmikrut @DanRibbens
/packages/plugin-cloud*/ @denolfe
-/packages/plugin-form-builder/ @jacobsfletch
-/packages/plugin-live-preview*/ @jacobsfletch
-/packages/plugin-nested-docs/ @jacobsfletch
-/packages/plugin-redirects/ @jacobsfletch
-/packages/plugin-search/ @jacobsfletch
-/packages/plugin-sentry/ @JessChowdhury
-/packages/plugin-seo/ @jacobsfletch
-/packages/plugin-stripe/ @jacobsfletch
-### Examples ###
-/examples/ @jacobsfletch
-/examples/testing/ @JarrodMFlesch
-/examples/email/ @JessChowdhury
-/examples/whitelabel/ @JessChowdhury
### Templates ###
/templates/ @jacobsfletch @denolfe
### Misc ###
/packages/create-payload-app/ @denolfe
-/packages/eslint-config-payload/ @denolfe
-/packages/payload-admin-bar/ @jacobsfletch
-### Build Files ###
-/**/package.json @denolfe
-/tsconfig.json @denolfe
-/**/tsconfig*.json @denolfe
-/jest.config.js @denolfe
-/**/jest.config.js @denolfe
+/packages/eslint-*/ @denolfe
### Root ###
/package.json @denolfe
/scripts/ @denolfe
-/.husky/ @denolfe
-/.vscode/ @denolfe
/.github/ @denolfe
+/.github/CODEOWNERS @denolfe


@@ -10,6 +10,10 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
+env:
+NODE_VERSION: 18.20.2
+PNPM_VERSION: 8.15.7
jobs:
changes:
runs-on: ubuntu-latest
@@ -19,6 +23,10 @@ jobs:
needs_build: ${{ steps.filter.outputs.needs_build }}
templates: ${{ steps.filter.outputs.templates }}
steps:
+# https://github.com/actions/virtual-environments/issues/1187
+- name: tune linux network
+run: sudo ethtool -K eth0 tx off rx off
- uses: actions/checkout@v4
with:
fetch-depth: 25
@@ -49,15 +57,19 @@ jobs:
with:
fetch-depth: 25
-- name: Use Node.js 18
+# https://github.com/actions/virtual-environments/issues/1187
+- name: tune linux network
+run: sudo ethtool -K eth0 tx off rx off
+- name: Setup Node@${{ env.NODE_VERSION }}
uses: actions/setup-node@v4
with:
-node-version: 18
+node-version: ${{ env.NODE_VERSION }}
- name: Install pnpm
uses: pnpm/action-setup@v3
with:
-version: 8
+version: ${{ env.PNPM_VERSION }}
run_install: false
- name: Get pnpm store directory
@@ -90,15 +102,19 @@ jobs:
needs: build
steps:
-- name: Use Node.js 18
+# https://github.com/actions/virtual-environments/issues/1187
+- name: tune linux network
+run: sudo ethtool -K eth0 tx off rx off
+- name: Setup Node@${{ env.NODE_VERSION }}
uses: actions/setup-node@v4
with:
-node-version: 18
+node-version: ${{ env.NODE_VERSION }}
- name: Install pnpm
uses: pnpm/action-setup@v3
with:
-version: 8
+version: ${{ env.PNPM_VERSION }}
run_install: false
- name: Restore build
@@ -135,15 +151,19 @@ jobs:
AWS_REGION: us-east-1
steps:
-- name: Use Node.js 18
+# https://github.com/actions/virtual-environments/issues/1187
+- name: tune linux network
+run: sudo ethtool -K eth0 tx off rx off
+- name: Setup Node@${{ env.NODE_VERSION }}
uses: actions/setup-node@v4
with:
-node-version: 18
+node-version: ${{ env.NODE_VERSION }}
- name: Install pnpm
uses: pnpm/action-setup@v3
with:
-version: 8
+version: ${{ env.PNPM_VERSION }}
run_install: false
- name: Restore build
@@ -217,7 +237,6 @@ jobs:
- access-control
- admin
- auth
-- email
- field-error-states
- fields-relationship
- fields
@@ -227,6 +246,7 @@ jobs:
- fields__collections__Lexical
- live-preview
- localization
+- plugin-cloud-storage
- plugin-form-builder
- plugin-nested-docs
- plugin-seo
@@ -234,15 +254,19 @@ jobs:
- uploads
steps:
-- name: Use Node.js 18
+# https://github.com/actions/virtual-environments/issues/1187
+- name: tune linux network
+run: sudo ethtool -K eth0 tx off rx off
+- name: Setup Node@${{ env.NODE_VERSION }}
uses: actions/setup-node@v4
with:
-node-version: 18
+node-version: ${{ env.NODE_VERSION }}
- name: Install pnpm
uses: pnpm/action-setup@v3
with:
-version: 8
+version: ${{ env.PNPM_VERSION }}
run_install: false
- name: Restore build
@@ -252,6 +276,10 @@ jobs:
path: ./*
key: ${{ github.sha }}-${{ github.run_number }}
+- name: Start LocalStack
+run: pnpm docker:start
+if: ${{ matrix.suite == 'plugin-cloud-storage' }}
- name: Install Playwright
run: pnpm exec playwright install --with-deps
@@ -263,6 +291,7 @@ jobs:
with:
name: test-results-${{ matrix.suite }}
path: test/test-results/
+if-no-files-found: ignore
retention-days: 1
tests-type-generation:
@@ -271,15 +300,19 @@ jobs:
needs: build
steps:
-- name: Use Node.js 18
+# https://github.com/actions/virtual-environments/issues/1187
+- name: tune linux network
+run: sudo ethtool -K eth0 tx off rx off
+- name: Setup Node@${{ env.NODE_VERSION }}
uses: actions/setup-node@v4
with:
-node-version: 18
+node-version: ${{ env.NODE_VERSION }}
- name: Install pnpm
uses: pnpm/action-setup@v3
with:
-version: 8
+version: ${{ env.PNPM_VERSION }}
run_install: false
- name: Restore build
@@ -308,11 +341,14 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 25
+# https://github.com/actions/virtual-environments/issues/1187
+- name: tune linux network
+run: sudo ethtool -K eth0 tx off rx off
-- name: Use Node.js 18
+- name: Setup Node@${{ env.NODE_VERSION }}
uses: actions/setup-node@v4
with:
-node-version: 18
+node-version: ${{ env.NODE_VERSION }}
- name: Start MongoDB
uses: supercharge/mongodb-github-action@1.10.0

.vscode/launch.json

@@ -41,6 +41,13 @@
"request": "launch",
"type": "node-terminal"
},
+{
+"command": "node --no-deprecation test/dev.js auth",
+"cwd": "${workspaceFolder}",
+"name": "Run Dev Auth",
+"request": "launch",
+"type": "node-terminal"
+},
{
"command": "pnpm run dev plugin-cloud-storage",
"cwd": "${workspaceFolder}",
@@ -69,36 +76,26 @@
}
},
{
-"command": "pnpm run dev versions",
+"command": "node --no-deprecation test/dev.js versions",
"cwd": "${workspaceFolder}",
"name": "Run Dev Versions",
"request": "launch",
"type": "node-terminal"
},
{
-"command": "pnpm run dev localization",
+"command": "node --no-deprecation test/dev.js localization",
"cwd": "${workspaceFolder}",
"name": "Run Dev Localization",
"request": "launch",
"type": "node-terminal"
},
{
-"command": "pnpm run dev uploads",
+"command": "node --no-deprecation test/dev.js uploads",
"cwd": "${workspaceFolder}",
"name": "Run Dev Uploads",
"request": "launch",
"type": "node-terminal"
},
-{
-"command": "PAYLOAD_BUNDLER=vite pnpm run dev fields",
-"cwd": "${workspaceFolder}",
-"name": "Run Dev Fields (Vite)",
-"request": "launch",
-"type": "node-terminal",
-"env": {
-"NODE_ENV": "production"
-}
-},
{
"command": "pnpm run test:int live-preview",
"cwd": "${workspaceFolder}",


@@ -1,9 +1,10 @@
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
/* DO NOT MODIFY it because it could be re-written at any time. */
import config from '@payload-config'
-import { REST_DELETE, REST_GET, REST_PATCH, REST_POST } from '@payloadcms/next/routes'
+import { REST_DELETE, REST_GET, REST_OPTIONS, REST_PATCH, REST_POST } from '@payloadcms/next/routes'
export const GET = REST_GET(config)
export const POST = REST_POST(config)
export const DELETE = REST_DELETE(config)
export const PATCH = REST_PATCH(config)
+export const OPTIONS = REST_OPTIONS(config)


@@ -4,7 +4,7 @@ label: JSON
order: 50
desc: The JSON field type will store any string in the Database. Learn how to use JSON fields, see examples and options.
-keywords: json, fields, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, express
+keywords: json, jsonSchema, schema, validation, fields, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, express
---
<Banner>
@@ -30,6 +30,7 @@ This field uses the `monaco-react` editor syntax highlighting.
| **`unique`** | Enforce that each entry in the Collection has a unique value for this field. |
| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`validate`** | Provide a custom validation function that will be executed on both the Admin panel and the backend. [More](/docs/fields/overview#validation) |
+| **`jsonSchema`** | Provide a JSON schema that will be used for validation. [JSON schemas](https://json-schema.org/learn/getting-started-step-by-step) |
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/config), include its data in the user JWT. |
| **`hooks`** | Provide field-based hooks to control logic for this field. [More](/docs/fields/overview#field-level-hooks) |
| **`access`** | Provide field-based access control to denote what users can see and do with this field's data. [More](/docs/fields/overview#field-level-access-control) |
@@ -52,7 +53,7 @@ In addition to the default [field admin config](/docs/fields/overview#admin-conf
### Example
-`collections/ExampleCollection.ts
+`collections/ExampleCollection.ts`
```ts
import { CollectionConfig } from 'payload/types'
@@ -68,3 +69,67 @@ export const ExampleCollection: CollectionConfig = {
],
}
```
### JSON Schema Validation
Payload JSON fields fully support the [JSON schema](https://json-schema.org/) standard. By providing a schema in your field config, the editor will be guided in the admin UI, getting typeahead for properties and their formats automatically. When the document is saved, the default validation will prevent saving any invalid data in the field according to the schema in your config.
If you only provide a URL to a schema, Payload will fetch the desired schema if it is publicly available. If not, it is recommended to add the schema directly to your config or import it from another file so that it can be implemented consistently in your project.
#### Local JSON Schema
`collections/ExampleCollection.ts`
```ts
import { CollectionConfig } from 'payload/types'
export const ExampleCollection: CollectionConfig = {
slug: 'example-collection',
fields: [
{
name: 'customerJSON', // required
type: 'json', // required
jsonSchema: {
uri: 'a://b/foo.json', // required
fileMatch: ['a://b/foo.json'], // required
schema: {
type: 'object',
properties: {
foo: {
enum: ['bar', 'foobar'],
}
},
},
},
},
],
}
// {"foo": "bar"} or {"foo": "foobar"} - ok
// Attempting to create {"foo": "not-bar"} will throw an error
```
#### Remote JSON Schema
`collections/ExampleCollection.ts`
```ts
import { CollectionConfig } from 'payload/types'
export const ExampleCollection: CollectionConfig = {
slug: 'example-collection',
fields: [
{
name: 'customerJSON', // required
type: 'json', // required
jsonSchema: {
uri: 'https://example.com/customer.schema.json', // required
fileMatch: ['https://example.com/customer.schema.json'], // required
},
},
],
}
// If 'https://example.com/customer.schema.json' has a JSON schema
// {"foo": "bar"} or {"foo": "foobar"} - ok
// Attempting to create {"foo": "not-bar"} will throw an error
```
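The root `package.json` hunk later in this commit adds `ajv` as a dependency, which matches the schema-validation behavior described above. A minimal sketch of that kind of check, assuming a standalone script rather than Payload's internal wiring:

```ts
// Hedged sketch: validating the example schema with ajv.
// The schema and sample values mirror the docs above; how Payload wires this up internally is assumed.
import Ajv from 'ajv'

const ajv = new Ajv()
const validate = ajv.compile({
  type: 'object',
  properties: {
    foo: {
      enum: ['bar', 'foobar'],
    },
  },
})

console.log(validate({ foo: 'bar' })) // true
console.log(validate({ foo: 'not-bar' })) // false; validate.errors describes the failure
```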


@@ -42,11 +42,12 @@ export const PublicUser: CollectionConfig = {
**Payload will automatically open up the following queries:**
| Query Name | Operation |
| ------------------ | ------------------- |
| **`PublicUser`** | `findByID` |
| **`PublicUsers`** | `find` |
+| **`countPublicUsers`** | `count` |
| **`mePublicUser`** | `me` auth operation |
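A sketch of what the new count query looks like from a client, assuming the generated response exposes a `totalDocs` field in line with the REST and Local API count responses elsewhere in this commit:

```ts
// Hedged sketch: calling the generated count query over the GraphQL endpoint.
// The endpoint path and the totalDocs response field are assumptions based on the
// REST and Local API count examples in this commit, not taken from the GraphQL docs themselves.
const res = await fetch('http://localhost:3000/api/graphql', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'query { countPublicUsers { totalDocs } }',
  }),
})

const { data } = await res.json()
console.log(data.countPublicUsers.totalDocs)
```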
**And the following mutations:**


@@ -8,7 +8,7 @@ keywords: live preview, frontend, react, next.js, vue, nuxt.js, svelte, hook, us
While using Live Preview, the Admin panel emits a new `window.postMessage` event every time a change is made to the document. Your front-end application can listen for these events and re-render accordingly.
-Wiring your front-end into Live Preview is easy. If your front-end application is built with React or Next.js, use the [`useLivePreview`](#react) React hook that Payload provides. In the future, all other major frameworks like Vue, Svelte, etc will be officially supported. If you are using any of these frameworks today, you can still integrate with Live Preview yourself using the underlying tooling that Payload provides. See [building your own hook](#building-your-own-hook) for more information.
+Wiring your front-end into Live Preview is easy. If your front-end application is built with React, Next.js, Vue or Nuxt.js, use the `useLivePreview` hook that Payload provides. In the future, all other major frameworks like Svelte will be officially supported. If you are using any of these frameworks today, you can still integrate with Live Preview yourself using the underlying tooling that Payload provides. See [building your own hook](#building-your-own-hook) for more information.
By default, all hooks accept the following args:
@@ -36,6 +36,10 @@ And return the following values:
For example, `data?.relatedPosts?.[0]?.title`.
</Banner>
+<Banner type="info">
+It is important that the `depth` argument matches exactly with the depth of your initial page request. The depth property is used to populated relationships and uploads beyond their IDs. See [Depth](../getting-started/concepts#depth) for more information.
+</Banner>
### React
If your front-end application is built with React or Next.js, you can use the `useLivePreview` hook that Payload provides.
@@ -71,11 +75,40 @@ export const PageClient: React.FC<{
}
```
-<Banner type="info">
-If is important that the `depth` argument matches exactly with the depth of your initial page
-request. The depth property is used to populated relationships and uploads beyond their IDs. See
-[Depth](../getting-started/concepts#depth) for more information.
-</Banner>
+### Vue
+If your front-end application is built with Vue 3 or Nuxt 3, you can use the `useLivePreview` composable that Payload provides.
+First, install the `@payloadcms/live-preview-vue` package:
```bash
npm install @payloadcms/live-preview-vue
```
Then, use the `useLivePreview` hook in your Vue component:
```vue
<script setup lang="ts">
import type { PageData } from '~/types';
import { defineProps } from 'vue';
import { useLivePreview } from '@payloadcms/live-preview-vue';
// Fetch the initial data on the parent component or using async state
const props = defineProps<{ initialData: PageData }>();
// The hook will take over from here and keep the preview in sync with the changes you make.
// The `data` property will contain the live data of the document only when viewed from the Preview view of the Admin UI.
const { data } = useLivePreview<PageData>({
initialData: props.initialData,
serverURL: "<PAYLOAD_SERVER_URL>",
depth: 2,
});
</script>
<template>
<h1>{{ data.title }}</h1>
</template>
```
## Building your own hook


@@ -164,6 +164,22 @@ const result = await payload.findByID({
})
```
#### Count
```js
// Result will be an object with:
// {
// totalDocs: 10, // count of the documents satisfies query
// }
const result = await payload.count({
collection: 'posts', // required
locale: 'en',
where: {}, // pass a `where` query here
user: dummyUser,
overrideAccess: false,
})
```
#### Update by ID
```js


@@ -90,6 +90,19 @@ Note: Collection slugs must be formatted in kebab-case
},
},
},
{
operation: "Count",
method: "GET",
path: "/api/{collection-slug}/count",
description: "Count the documents",
example: {
slug: "count",
req: true,
res: {
totalDocs: 10
},
},
},
{
operation: "Create",
method: "POST",


@@ -40,21 +40,22 @@ Every Payload Collection can opt-in to supporting Uploads by specifying the `upl
### Collection Upload Options
| Option | Description |
| ------------------------------ | ----------- |
| **`staticURL`** \* | The URL path to use to access your uploads. Relative path like `/media` will be served by payload. Full path like `https://example.com/media` needs to be served by another web server. |
| **`staticDir`** \* | The folder directory to use to store media in. Can be either an absolute path or relative to the directory that contains your config. |
| **`adminThumbnail`** | Set the way that the Admin panel will display thumbnails for this Collection. [More](#admin-thumbnails) |
| **`crop`** | Set to `false` to disable the cropping tool in the Admin panel. Crop is enabled by default. [More](#crop-and-focal-point-selector) |
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
+| **`externalFileHeaderFilter`** | Accepts existing headers and can filter/modify them. |
| **`focalPoint`** | Set to `false` to disable the focal point selection tool in the Admin panel. The focal point selector is only available when `imageSizes` or `resizeOptions` are defined. [More](#crop-and-focal-point-selector) |
| **`formatOptions`** | An object with `format` and `options` that are used with the Sharp image library to format the upload file. [More](https://sharp.pixelplumbing.com/api-output#toformat) |
| **`handlers`** | Array of Express request handlers to execute before the built-in Payload static middleware executes. |
| **`imageSizes`** | If specified, image uploads will be automatically resized in accordance to these image sizes. [More](#image-sizes) |
| **`mimeTypes`** | Restrict mimeTypes in the file picker. Array of valid mimetypes or mimetype wildcards [More](#mimetypes) |
| **`staticOptions`** | Set options for `express.static` to use while serving your static files. [More](http://expressjs.com/en/resources/middleware/serve-static.html) |
| **`resizeOptions`** | An object passed to the the Sharp image library to resize the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize) |
| **`filesRequiredOnCreate`** | Mandate file data on creation, default is true. |
_An asterisk denotes that a property above is required._
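A minimal sketch of a collection that opts into uploads using a few of the options above; the slug, fields, image sizes, and the `externalFileHeaderFilter` signature are illustrative assumptions rather than something taken from this commit:

```ts
import type { CollectionConfig } from 'payload/types'

// Hedged sketch of an upload-enabled collection using options from the table above.
// The slug, image sizes, and the externalFileHeaderFilter signature are illustrative assumptions.
export const Media: CollectionConfig = {
  slug: 'media',
  upload: {
    staticURL: '/media',
    staticDir: 'media',
    mimeTypes: ['image/*'],
    imageSizes: [{ name: 'thumbnail', width: 400, height: 300 }],
    adminThumbnail: 'thumbnail',
    // Assumed shape: receives the headers used when fetching external files and returns the filtered set
    externalFileHeaderFilter: (headers) => headers,
  },
  fields: [{ name: 'alt', type: 'text' }],
}
```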


@@ -1,6 +1,6 @@
{
"name": "payload-monorepo",
-"version": "3.0.0-beta.10",
+"version": "3.0.0-beta.13",
"private": true,
"type": "module",
"workspaces:": [
@@ -18,6 +18,7 @@
"build:create-payload-app": "turbo build --filter create-payload-app",
"build:db-mongodb": "turbo build --filter db-mongodb",
"build:db-postgres": "turbo build --filter db-postgres",
+"build:email-nodemailer": "turbo build --filter email-nodemailer",
"build:eslint-config-payload": "turbo build --filter eslint-config-payload",
"build:graphql": "turbo build --filter graphql",
"build:live-preview": "turbo build --filter live-preview",
@@ -35,7 +36,7 @@
"build:plugin-stripe": "turbo build --filter plugin-stripe",
"build:richtext-lexical": "turbo build --filter richtext-lexical",
"build:richtext-slate": "turbo build --filter richtext-slate",
-"build:tests": "pnpm --filter test run typecheck",
+"build:tests": "pnpm --filter payload-test-suite run typecheck",
"build:translations": "turbo build --filter translations",
"build:ui": "turbo build --filter ui",
"clean": "turbo clean",
@@ -127,7 +128,7 @@
"lint-staged": "^14.0.1",
"minimist": "1.2.8",
"mongodb-memory-server": "^9.0",
-"next": "^14.2.0-canary.23",
+"next": "^14.3.0-canary.7",
"node-mocks-http": "^1.14.1",
"nodemon": "3.0.3",
"open": "^10.1.0",
@@ -164,7 +165,7 @@
},
"engines": {
"node": ">=18.20.2",
-"pnpm": ">=8"
+"pnpm": "^8.15.7"
},
"lint-staged": {
"*.{md,mdx,yml,json}": "prettier --write",
@@ -175,6 +176,7 @@
},
"dependencies": {
"@sentry/react": "^7.77.0",
+"ajv": "^8.12.0",
"passport-strategy": "1.0.0"
},
"pnpm": {


@@ -1,6 +1,6 @@
{
"name": "create-payload-app",
-"version": "3.0.0-beta.10",
+"version": "3.0.0-beta.13",
"license": "MIT",
"type": "module",
"homepage": "https://payloadcms.com",


@@ -1,5 +1,9 @@
import fse from 'fs-extra'
import globby from 'globby'
+import { fileURLToPath } from 'node:url'
+import path from 'path'
+const filename = fileURLToPath(import.meta.url)
+const dirname = path.dirname(filename)
import type { DbDetails } from '../types.js'
@@ -15,6 +19,34 @@ export async function configurePayloadConfig(args: {
return
}
// Update package.json
const packageJsonPath =
'projectDir' in args.projectDirOrConfigPath &&
path.resolve(args.projectDirOrConfigPath.projectDir, 'package.json')
if (packageJsonPath && fse.existsSync(packageJsonPath)) {
try {
const packageObj = await fse.readJson(packageJsonPath)
const dbPackage = dbReplacements[args.dbDetails.type]
// Delete all other db adapters
Object.values(dbReplacements).forEach((p) => {
if (p.packageName !== dbPackage.packageName) {
delete packageObj.dependencies[p.packageName]
}
})
// Set version of db adapter to match payload version
packageObj.dependencies[dbPackage.packageName] = packageObj.dependencies['payload']
await fse.writeJson(packageJsonPath, packageObj, { spaces: 2 })
} catch (err: unknown) {
warning(`Unable to configure Payload in package.json`)
warning(err instanceof Error ? err.message : '')
}
}
try {
let payloadConfigPath: string | undefined
if (!('payloadConfigPath' in args.projectDirOrConfigPath)) {


@@ -117,13 +117,17 @@ async function addPayloadConfigToTsConfig(projectDir: string, isSrcDir: boolean)
warning(`Could not find tsconfig.json to add @payload-config path.`)
return
}
const userTsConfigContent = await readFile(tsConfigPath, {
encoding: 'utf8',
})
const userTsConfig = parse(userTsConfigContent) as {
compilerOptions?: CompilerOptions
}
+const hasBaseUrl =
+userTsConfig?.compilerOptions?.baseUrl && userTsConfig?.compilerOptions?.baseUrl !== '.'
+const baseUrl = hasBaseUrl ? userTsConfig?.compilerOptions?.baseUrl : './'
if (!userTsConfig.compilerOptions && !('extends' in userTsConfig)) {
userTsConfig.compilerOptions = {}
}
@@ -134,7 +138,7 @@ async function addPayloadConfigToTsConfig(projectDir: string, isSrcDir: boolean)
) {
userTsConfig.compilerOptions.paths = {
...(userTsConfig.compilerOptions.paths || {}),
-'@payload-config': [`./${isSrcDir ? 'src/' : ''}payload.config.ts`],
+'@payload-config': [`${baseUrl}${isSrcDir ? 'src/' : ''}payload.config.ts`],
}
await writeFile(tsConfigPath, stringify(userTsConfig, null, 2), { encoding: 'utf8' })
}


@@ -16,7 +16,7 @@ const dbChoiceRecord: Record<DbType, DbChoice> = {
value: 'mongodb',
},
postgres: {
-dbConnectionPrefix: 'postgres://127.0.0.1:5432/',
+dbConnectionPrefix: 'postgres://postgres:<password>@127.0.0.1:5432/',
title: 'PostgreSQL (beta)',
value: 'postgres',
},


@@ -41,12 +41,12 @@ export function getValidTemplates(): ProjectTemplate[] {
// description: 'E-commerce Template',
// url: 'https://github.com/payloadcms/payload/templates/ecommerce',
// },
-// {
-// name: 'plugin',
-// type: 'plugin',
-// description: 'Template for creating a Payload plugin',
-// url: 'https://github.com/payloadcms/payload-plugin-template',
-// },
+{
+name: 'plugin',
+type: 'plugin',
+description: 'Template for creating a Payload plugin',
+url: 'https://github.com/payloadcms/payload-plugin-template#beta',
+},
// {
// name: 'payload-demo',
// type: 'starter',


@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
-"version": "3.0.0-beta.10",
+"version": "3.0.0-beta.13",
"description": "The officially supported MongoDB database adapter for Payload",
"repository": {
"type": "git",


@@ -0,0 +1,49 @@
import type { QueryOptions } from 'mongoose'
import type { Count } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import { flattenWhereToOperators } from 'payload/database'
import type { MongooseAdapter } from './index.js'
import { withSession } from './withSession.js'
export const count: Count = async function count(
this: MongooseAdapter,
{ collection, locale, req = {} as PayloadRequest, where },
) {
const Model = this.collections[collection]
const options: QueryOptions = withSession(this, req.transactionID)
let hasNearConstraint = false
if (where) {
const constraints = flattenWhereToOperators(where)
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'))
}
const query = await Model.buildQuery({
locale,
payload: this.payload,
where,
})
// useEstimatedCount is faster, but not accurate, as it ignores any filters. It is thus set to true if there are no filters.
const useEstimatedCount = hasNearConstraint || !query || Object.keys(query).length === 0
if (!useEstimatedCount && Object.keys(query).length === 0 && this.disableIndexHints !== true) {
// Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding
// a hint. By default, if no hint is provided, MongoDB does not use an indexed field to count the returned documents,
// which makes queries very slow. This only happens when no query (filter) is provided. If one is provided, it uses
// the correct indexed field
options.hint = {
_id: 1,
}
}
const result = await Model.countDocuments(query, options)
return {
totalDocs: result,
}
}


@@ -12,6 +12,7 @@ import { createDatabaseAdapter } from 'payload/database'
import type { CollectionModel, GlobalModel } from './types.js'
import { connect } from './connect.js'
+import { count } from './count.js'
import { create } from './create.js'
import { createGlobal } from './createGlobal.js'
import { createGlobalVersion } from './createGlobalVersion.js'
@@ -112,6 +113,7 @@ export function mongooseAdapter({
collections: {},
connectOptions: connectOptions || {},
connection: undefined,
+count,
disableIndexHints,
globals: undefined,
mongoMemoryServer,
@@ -119,7 +121,6 @@ export function mongooseAdapter({
transactionOptions: transactionOptions === false ? undefined : transactionOptions,
url,
versions: {},
// DatabaseAdapter
beginTransaction: transactionOptions ? beginTransaction : undefined,
commitTransaction,


@@ -28,6 +28,7 @@ export const init: Init = function init(this: MongooseAdapter) {
const versionSchema = buildSchema(this.payload.config, versionCollectionFields, {
disableUnique: true,
draftsEnabled: true,
+indexSortableFields: this.payload.config.indexSortableFields,
options: {
minimize: false,
timestamps: false,


@@ -142,7 +142,10 @@ export const sanitizeQueryValue = ({
if (path !== '_id' || (path === '_id' && hasCustomID && field.type === 'text')) {
if (operator === 'contains') {
-formattedValue = { $options: 'i', $regex: formattedValue }
+formattedValue = {
+$options: 'i',
+$regex: formattedValue.replace(/[\\^$*+?.()|[\]{}]/g, '\\$&'),
+}
}
}
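To make the effect of the new escaping concrete, a small illustration of what a `contains` value looks like before it reaches `$regex`; the sample input string is hypothetical, while the `replace()` call is copied from the change above:

```ts
// Hedged illustration of the escaping added above for the `contains` operator.
const userInput = 'price (USD) 1+1' // hypothetical search term
const escaped = userInput.replace(/[\\^$*+?.()|[\]{}]/g, '\\$&')

console.log(escaped) // price \(USD\) 1\+1
// The query becomes { $regex: escaped, $options: 'i' }, so "(" and "+" are matched
// literally instead of being interpreted as regular-expression syntax.
```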


@@ -6,6 +6,10 @@ export const commitTransaction: CommitTransaction = async function commitTransac
}
await this.sessions[id].commitTransaction()
-await this.sessions[id].endSession()
+try {
+await this.sessions[id].endSession()
+} catch (error) {
+// ending sessions is only best effort and won't impact anything if it fails since the transaction was committed
+}
delete this.sessions[id]
}


@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
-"version": "3.0.0-beta.10",
+"version": "3.0.0-beta.13",
"description": "The officially supported Postgres database adapter for Payload",
"repository": {
"type": "git",


@@ -0,0 +1,65 @@
import type { Count } from 'payload/database'
import type { SanitizedCollectionConfig } from 'payload/types'
import { sql } from 'drizzle-orm'
import type { ChainedMethods } from './find/chainMethods.js'
import type { PostgresAdapter } from './types.js'
import { chainMethods } from './find/chainMethods.js'
import buildQuery from './queries/buildQuery.js'
import { getTableName } from './schema/getTableName.js'
export const count: Count = async function count(
this: PostgresAdapter,
{ collection, locale, req, where: whereArg },
) {
const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config
const tableName = getTableName({
adapter: this,
config: collectionConfig,
})
const db = this.sessions[req.transactionID]?.db || this.drizzle
const table = this.tables[tableName]
const { joinAliases, joins, where } = await buildQuery({
adapter: this,
fields: collectionConfig.fields,
locale,
tableName,
where: whereArg,
})
const selectCountMethods: ChainedMethods = []
joinAliases.forEach(({ condition, table }) => {
selectCountMethods.push({
args: [table, condition],
method: 'leftJoin',
})
})
Object.entries(joins).forEach(([joinTable, condition]) => {
if (joinTable) {
selectCountMethods.push({
args: [this.tables[joinTable], condition],
method: 'leftJoin',
})
}
})
const countResult = await chainMethods({
methods: selectCountMethods,
query: db
.select({
count: sql<number>`count
(DISTINCT ${this.tables[tableName].id})`,
})
.from(table)
.where(where),
})
return { totalDocs: Number(countResult[0].count) }
}


@@ -2,13 +2,13 @@ import type { Destroy } from 'payload/database'
import type { PostgresAdapter } from './types.js'
-import { pushDevSchema } from './utilities/pushDevSchema.js'
+// eslint-disable-next-line @typescript-eslint/require-await
export const destroy: Destroy = async function destroy(this: PostgresAdapter) {
-if (process.env.NODE_ENV !== 'production') {
-await pushDevSchema(this)
-} else {
-// TODO: this hangs test suite for some reason
-// await this.pool.end()
-}
+this.enums = {}
+this.schema = {}
+this.tables = {}
+this.relations = {}
+this.blockTableNames = {}
+this.fieldConstraints = {}
+this.drizzle = undefined
}


@@ -8,6 +8,7 @@ import { createDatabaseAdapter } from 'payload/database'
import type { Args, PostgresAdapter } from './types.js'
import { connect } from './connect.js'
+import { count } from './count.js'
import { create } from './create.js'
import { createGlobal } from './createGlobal.js'
import { createGlobalVersion } from './createGlobalVersion.js'
@@ -43,9 +44,11 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types.js'
export { sql } from 'drizzle-orm'
export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter> {
+const postgresIDType = args.idType || 'serial'
+const payloadIDType = postgresIDType ? 'number' : 'text'
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(args.migrationDir)
-const idType = args.idType || 'serial'
return createDatabaseAdapter<PostgresAdapter>({
name: 'postgres',
@@ -55,7 +58,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
drizzle: undefined,
enums: {},
fieldConstraints: {},
-idType,
+idType: postgresIDType,
localesSuffix: args.localesSuffix || '_locales',
logger: args.logger,
pgSchema: undefined,
@@ -74,15 +77,13 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
beginTransaction,
commitTransaction,
connect,
+count,
create,
createGlobal,
createGlobalVersion,
createMigration,
createVersion,
-/**
-* This represents how a default ID is treated in Payload as were a field type
-*/
-defaultIDType: idType === 'serial' ? 'number' : 'text',
+defaultIDType: payloadIDType,
deleteMany,
deleteOne,
deleteVersions,
@@ -111,7 +112,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
}
return {
-defaultIDType: 'number',
+defaultIDType: payloadIDType,
init: adapter,
}
}


@@ -225,6 +225,85 @@ export const getTableColumnFromPath = ({
})
}
case 'select': {
if (field.hasMany) {
newTableName = getTableName({
adapter,
config: field,
parentTableName: `${tableName}_${tableNameSuffix}`,
prefix: `${tableName}_${tableNameSuffix}`,
})
if (locale && field.localized && adapter.payload.config.localization) {
joins[newTableName] = and(
eq(adapter.tables[tableName].id, adapter.tables[newTableName].parent),
eq(adapter.tables[newTableName]._locale, locale),
)
if (locale !== 'all') {
constraints.push({
columnName: '_locale',
table: adapter.tables[newTableName],
value: locale,
})
}
} else {
joins[newTableName] = eq(
adapter.tables[tableName].id,
adapter.tables[newTableName].parent,
)
}
return {
columnName: 'value',
constraints,
field,
table: adapter.tables[newTableName],
}
}
break
}
case 'text':
case 'number': {
if (field.hasMany) {
let tableType = 'texts'
let columnName = 'text'
if (field.type === 'number') {
tableType = 'numbers'
columnName = 'number'
}
newTableName = `${tableName}_${tableType}`
const joinConstraints = [
eq(adapter.tables[tableName].id, adapter.tables[newTableName].parent),
eq(adapter.tables[newTableName].path, `${constraintPath}${field.name}`),
]
if (locale && field.localized && adapter.payload.config.localization) {
joins[newTableName] = and(
...joinConstraints,
eq(adapter.tables[newTableName]._locale, locale),
)
if (locale !== 'all') {
constraints.push({
columnName: 'locale',
table: adapter.tables[newTableName],
value: locale,
})
}
} else {
joins[newTableName] = and(...joinConstraints)
}
return {
columnName,
constraints,
field,
table: adapter.tables[newTableName],
}
}
break
}
case 'array': {
newTableName = getTableName({
adapter,
@@ -485,43 +564,41 @@ export const getTableColumnFromPath = ({
value,
})
}
+}
-default: {
if (fieldAffectsData(field)) {
if (field.localized && adapter.payload.config.localization) {
// If localized, we go to localized table and set aliasTable to undefined
// so it is not picked up below to be used as targetTable
newTableName = `${tableName}${adapter.localesSuffix}`
const parentTable = aliasTable || adapter.tables[tableName]
joins[newTableName] = eq(parentTable.id, adapter.tables[newTableName]._parentID)
aliasTable = undefined
if (locale !== 'all') {
constraints.push({
columnName: '_locale',
table: adapter.tables[newTableName],
value: locale,
})
}
}
const targetTable = aliasTable || adapter.tables[newTableName]
selectFields[`${newTableName}.${columnPrefix}${field.name}`] =
targetTable[`${columnPrefix}${field.name}`]
return {
columnName: `${columnPrefix}${field.name}`,
constraints,
field,
pathSegments,
table: targetTable,
}
}
-}
}
}


@@ -1,6 +1,7 @@
/* eslint-disable no-param-reassign */
import type { Relation } from 'drizzle-orm'
import type {
+ForeignKeyBuilder,
IndexBuilder,
PgColumnBuilder,
PgTableWithColumns,
@@ -9,8 +10,17 @@ import type {
import type { Field } from 'payload/types'
import { relations } from 'drizzle-orm'
-import { index, integer, numeric, serial, timestamp, unique, varchar } from 'drizzle-orm/pg-core'
-import { fieldAffectsData } from 'payload/types'
+import {
+foreignKey,
+index,
+integer,
+numeric,
+serial,
+timestamp,
+unique,
+varchar,
+} from 'drizzle-orm/pg-core'
+import toSnakeCase from 'to-snake-case'
import type { GenericColumns, GenericTable, IDType, PostgresAdapter } from '../types.js'
@@ -19,10 +29,15 @@ import { parentIDColumnMap } from './parentIDColumnMap.js'
import { setColumnID } from './setColumnID.js'
import { traverseFields } from './traverseFields.js'
+export type BaseExtraConfig = Record<
+string,
+(cols: GenericColumns) => ForeignKeyBuilder | IndexBuilder | UniqueConstraintBuilder
+>
type Args = {
adapter: PostgresAdapter
baseColumns?: Record<string, PgColumnBuilder>
-baseExtraConfig?: Record<string, (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder>
+baseExtraConfig?: BaseExtraConfig
buildNumbers?: boolean
buildRelationships?: boolean
buildTexts?: boolean
@@ -134,10 +149,12 @@ export const buildTable = ({
return config
}, {})
-return Object.entries(indexes).reduce((acc, [colName, func]) => {
+const result = Object.entries(indexes).reduce((acc, [colName, func]) => {
acc[colName] = func(cols)
return acc
}, extraConfig)
+return result
})
adapter.tables[tableName] = table
@@ -146,9 +163,7 @@ export const buildTable = ({
const localeTableName = `${tableName}${adapter.localesSuffix}`
localesColumns.id = serial('id').primaryKey()
localesColumns._locale = adapter.enums.enum__locales('_locale').notNull()
-localesColumns._parentID = parentIDColumnMap[idColType]('_parent_id')
-.references(() => table.id, { onDelete: 'cascade' })
-.notNull()
+localesColumns._parentID = parentIDColumnMap[idColType]('_parent_id').notNull()
localesTable = adapter.pgSchema.table(localeTableName, localesColumns, (cols) => {
return Object.entries(localesIndexes).reduce(
@@ -161,6 +176,11 @@ export const buildTable = ({
cols._locale,
cols._parentID,
),
+_parentIdFk: foreignKey({
+name: `${localeTableName}_parent_id_fk`,
+columns: [cols._parentID],
+foreignColumns: [table.id],
+}).onDelete('cascade'),
},
)
})
@@ -182,9 +202,7 @@ export const buildTable = ({
const columns: Record<string, PgColumnBuilder> = {
id: serial('id').primaryKey(),
order: integer('order').notNull(),
-parent: parentIDColumnMap[idColType]('parent_id')
-.references(() => table.id, { onDelete: 'cascade' })
-.notNull(),
+parent: parentIDColumnMap[idColType]('parent_id').notNull(),
path: varchar('path').notNull(),
text: varchar('text'),
}
@@ -194,19 +212,24 @@ export const buildTable = ({
}
textsTable = adapter.pgSchema.table(textsTableName, columns, (cols) => {
-const indexes: Record<string, IndexBuilder> = {
+const config: Record<string, ForeignKeyBuilder | IndexBuilder> = {
orderParentIdx: index(`${textsTableName}_order_parent_idx`).on(cols.order, cols.parent),
+parentFk: foreignKey({
+name: `${textsTableName}_parent_fk`,
+columns: [cols.parent],
+foreignColumns: [table.id],
+}).onDelete('cascade'),
}
if (hasManyTextField === 'index') {
-indexes.text_idx = index(`${textsTableName}_text_idx`).on(cols.text)
+config.text_idx = index(`${textsTableName}_text_idx`).on(cols.text)
}
if (hasLocalizedManyTextField) {
-indexes.localeParent = index(`${textsTableName}_locale_parent`).on(cols.locale, cols.parent)
+config.localeParent = index(`${textsTableName}_locale_parent`).on(cols.locale, cols.parent)
}
-return indexes
+return config
})
adapter.tables[textsTableName] = textsTable
@@ -227,9 +250,7 @@ export const buildTable = ({
id: serial('id').primaryKey(),
number: numeric('number'),
order: integer('order').notNull(),
-parent: parentIDColumnMap[idColType]('parent_id')
-.references(() => table.id, { onDelete: 'cascade' })
-.notNull(),
+parent: parentIDColumnMap[idColType]('parent_id').notNull(),
path: varchar('path').notNull(),
}
@@ -238,22 +259,27 @@ export const buildTable = ({
}
numbersTable = adapter.pgSchema.table(numbersTableName, columns, (cols) => {
-const indexes: Record<string, IndexBuilder> = {
+const config: Record<string, ForeignKeyBuilder | IndexBuilder> = {
orderParentIdx: index(`${numbersTableName}_order_parent_idx`).on(cols.order, cols.parent),
+parentFk: foreignKey({
+name: `${numbersTableName}_parent_fk`,
+columns: [cols.parent],
+foreignColumns: [table.id],
+}).onDelete('cascade'),
}
if (hasManyNumberField === 'index') {
-indexes.numberIdx = index(`${numbersTableName}_number_idx`).on(cols.number)
+config.numberIdx = index(`${numbersTableName}_number_idx`).on(cols.number)
}
if (hasLocalizedManyNumberField) {
-indexes.localeParent = index(`${numbersTableName}_locale_parent`).on(
+config.localeParent = index(`${numbersTableName}_locale_parent`).on(
cols.locale,
cols.parent,
)
}
-return indexes
+return config
})
adapter.tables[numbersTableName] = numbersTable
@@ -273,9 +299,7 @@ export const buildTable = ({
const relationshipColumns: Record<string, PgColumnBuilder> = {
id: serial('id').primaryKey(),
order: integer('order'),
-parent: parentIDColumnMap[idColType]('parent_id')
-.references(() => table.id, { onDelete: 'cascade' })
-.notNull(),
+parent: parentIDColumnMap[idColType]('parent_id').notNull(),
path: varchar('path').notNull(),
}
@@ -283,6 +307,10 @@ export const buildTable = ({
relationshipColumns.locale = adapter.enums.enum__locales('locale')
}
+const relationExtraConfig: BaseExtraConfig = {}
+const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`
relationships.forEach((relationTo) => {
const relationshipConfig = adapter.payload.collections[relationTo].config
const formattedRelationTo = getTableName({
@@ -300,20 +328,38 @@ export const buildTable = ({
relationshipColumns[`${relationTo}ID`] = parentIDColumnMap[colType](
`${formattedRelationTo}_id`,
-).references(() => adapter.tables[formattedRelationTo].id, { onDelete: 'cascade' })
-})
-const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`
+)
+relationExtraConfig[`${relationTo}IdFk`] = (cols) =>
+foreignKey({
+name: `${relationshipsTableName}_${toSnakeCase(relationTo)}_fk`,
+columns: [cols[`${relationTo}ID`]],
+foreignColumns: [adapter.tables[formattedRelationTo].id],
+}).onDelete('cascade')
+})
relationshipsTable = adapter.pgSchema.table(
relationshipsTableName,
relationshipColumns,
(cols) => {
-const result: Record<string, unknown> = {
-order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
-parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
-pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
-}
+const result: Record<string, ForeignKeyBuilder | IndexBuilder> = Object.entries(
+relationExtraConfig,
+).reduce(
+(config, [key, func]) => {
+config[key] = func(cols)
+return config
+},
+{
+order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
+parentFk: foreignKey({
+name: `${relationshipsTableName}_parent_fk`,
+columns: [cols.parent],
+foreignColumns: [table.id],
+}).onDelete('cascade'),
+parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
+pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
+},
+)
if (hasLocalizedRelationshipField) {
result.localeIdx = index(`${relationshipsTableName}_locale_idx`).on(cols.locale)


@@ -1,6 +1,6 @@
/* eslint-disable no-param-reassign */
import type { Relation } from 'drizzle-orm'
-import type { IndexBuilder, PgColumnBuilder, UniqueConstraintBuilder } from 'drizzle-orm/pg-core'
+import type { IndexBuilder, PgColumnBuilder } from 'drizzle-orm/pg-core'
import type { Field, TabAsField } from 'payload/types'
import { relations } from 'drizzle-orm'
@@ -9,6 +9,7 @@ import {
PgUUIDBuilder, PgUUIDBuilder,
PgVarcharBuilder, PgVarcharBuilder,
boolean, boolean,
foreignKey,
index, index,
integer, integer,
jsonb, jsonb,
@@ -23,6 +24,7 @@ import { fieldAffectsData, optionIsObject } from 'payload/types'
import toSnakeCase from 'to-snake-case' import toSnakeCase from 'to-snake-case'
import type { GenericColumns, IDType, PostgresAdapter } from '../types.js' import type { GenericColumns, IDType, PostgresAdapter } from '../types.js'
import type { BaseExtraConfig } from './build.js'
import { hasLocalesTable } from '../utilities/hasLocalesTable.js' import { hasLocalesTable } from '../utilities/hasLocalesTable.js'
import { buildTable } from './build.js' import { buildTable } from './build.js'
@@ -228,7 +230,6 @@ export const traverseFields = ({
prefix: `enum_${newTableName}_`, prefix: `enum_${newTableName}_`,
target: 'enumName', target: 'enumName',
throwValidationError, throwValidationError,
versions,
}) })
adapter.enums[enumName] = pgEnum( adapter.enums[enumName] = pgEnum(
@@ -249,21 +250,21 @@ export const traverseFields = ({
parentTableName: newTableName, parentTableName: newTableName,
prefix: `${newTableName}_`, prefix: `${newTableName}_`,
throwValidationError, throwValidationError,
versions,
}) })
const baseColumns: Record<string, PgColumnBuilder> = { const baseColumns: Record<string, PgColumnBuilder> = {
order: integer('order').notNull(), order: integer('order').notNull(),
parent: parentIDColumnMap[parentIDColType]('parent_id') parent: parentIDColumnMap[parentIDColType]('parent_id').notNull(),
.references(() => adapter.tables[parentTableName].id, { onDelete: 'cascade' })
.notNull(),
value: adapter.enums[enumName]('value'), value: adapter.enums[enumName]('value'),
} }
const baseExtraConfig: Record< const baseExtraConfig: BaseExtraConfig = {
string,
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
> = {
orderIdx: (cols) => index(`${selectTableName}_order_idx`).on(cols.order), orderIdx: (cols) => index(`${selectTableName}_order_idx`).on(cols.order),
parentFk: (cols) =>
foreignKey({
name: `${selectTableName}_parent_fk`,
columns: [cols.parent],
foreignColumns: [adapter.tables[parentTableName].id],
}),
parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent), parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent),
} }
@@ -323,18 +324,20 @@ export const traverseFields = ({
prefix: `${newTableName}_`, prefix: `${newTableName}_`,
throwValidationError, throwValidationError,
}) })
const baseColumns: Record<string, PgColumnBuilder> = { const baseColumns: Record<string, PgColumnBuilder> = {
_order: integer('_order').notNull(), _order: integer('_order').notNull(),
_parentID: parentIDColumnMap[parentIDColType]('_parent_id') _parentID: parentIDColumnMap[parentIDColType]('_parent_id').notNull(),
.references(() => adapter.tables[parentTableName].id, { onDelete: 'cascade' })
.notNull(),
} }
const baseExtraConfig: Record< const baseExtraConfig: BaseExtraConfig = {
string,
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
> = {
_orderIdx: (cols) => index(`${arrayTableName}_order_idx`).on(cols._order), _orderIdx: (cols) => index(`${arrayTableName}_order_idx`).on(cols._order),
_parentIDFk: (cols) =>
foreignKey({
name: `${arrayTableName}_parent_id_fk`,
columns: [cols['_parentID']],
foreignColumns: [adapter.tables[parentTableName].id],
}).onDelete('cascade'),
_parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID), _parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID),
} }
@@ -412,18 +415,19 @@ export const traverseFields = ({
if (!adapter.tables[blockTableName]) { if (!adapter.tables[blockTableName]) {
const baseColumns: Record<string, PgColumnBuilder> = { const baseColumns: Record<string, PgColumnBuilder> = {
_order: integer('_order').notNull(), _order: integer('_order').notNull(),
_parentID: parentIDColumnMap[rootTableIDColType]('_parent_id') _parentID: parentIDColumnMap[rootTableIDColType]('_parent_id').notNull(),
.references(() => adapter.tables[rootTableName].id, { onDelete: 'cascade' })
.notNull(),
_path: text('_path').notNull(), _path: text('_path').notNull(),
} }
const baseExtraConfig: Record< const baseExtraConfig: BaseExtraConfig = {
string,
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
> = {
_orderIdx: (cols) => index(`${blockTableName}_order_idx`).on(cols._order), _orderIdx: (cols) => index(`${blockTableName}_order_idx`).on(cols._order),
_parentIDIdx: (cols) => index(`${blockTableName}_parent_id_idx`).on(cols._parentID), _parentIDIdx: (cols) => index(`${blockTableName}_parent_id_idx`).on(cols._parentID),
_parentIdFk: (cols) =>
foreignKey({
name: `${blockTableName}_parent_id_fk`,
columns: [cols._parentID],
foreignColumns: [adapter.tables[rootTableName].id],
}).onDelete('cascade'),
_pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path), _pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path),
} }
@@ -493,6 +497,7 @@ export const traverseFields = ({
localized: field.localized, localized: field.localized,
rootTableName, rootTableName,
table: adapter.tables[blockTableName], table: adapter.tables[blockTableName],
tableLocales: adapter.tables[`${blockTableName}${adapter.localesSuffix}`],
}) })
} }
adapter.blockTableNames[`${rootTableName}.${toSnakeCase(block.slug)}`] = blockTableName adapter.blockTableNames[`${rootTableName}.${toSnakeCase(block.slug)}`] = blockTableName
@@ -658,7 +663,7 @@ export const traverseFields = ({
indexes, indexes,
localesColumns, localesColumns,
localesIndexes, localesIndexes,
newTableName: parentTableName, newTableName,
parentTableName, parentTableName,
relationsToBuild, relationsToBuild,
relationships, relationships,

View File

@@ -10,9 +10,13 @@ type Args = {
localized: boolean localized: boolean
rootTableName: string rootTableName: string
table: GenericTable table: GenericTable
tableLocales?: GenericTable
} }
const getFlattenedFieldNames = (fields: Field[], prefix: string = ''): string[] => { const getFlattenedFieldNames = (
fields: Field[],
prefix: string = '',
): { localized?: boolean; name: string }[] => {
return fields.reduce((fieldsToUse, field) => { return fields.reduce((fieldsToUse, field) => {
let fieldPrefix = prefix let fieldPrefix = prefix
@@ -24,7 +28,7 @@ const getFlattenedFieldNames = (fields: Field[], prefix: string = ''): string[]
} }
if (fieldHasSubFields(field)) { if (fieldHasSubFields(field)) {
fieldPrefix = 'name' in field ? `${prefix}${field.name}.` : prefix fieldPrefix = 'name' in field ? `${prefix}${field.name}_` : prefix
return [...fieldsToUse, ...getFlattenedFieldNames(field.fields, fieldPrefix)] return [...fieldsToUse, ...getFlattenedFieldNames(field.fields, fieldPrefix)]
} }
@@ -32,7 +36,7 @@ const getFlattenedFieldNames = (fields: Field[], prefix: string = ''): string[]
return [ return [
...fieldsToUse, ...fieldsToUse,
...field.tabs.reduce((tabFields, tab) => { ...field.tabs.reduce((tabFields, tab) => {
fieldPrefix = 'name' in tab ? `${prefix}.${tab.name}` : prefix fieldPrefix = 'name' in tab ? `${prefix}_${tab.name}` : prefix
return [ return [
...tabFields, ...tabFields,
...(tabHasName(tab) ...(tabHasName(tab)
@@ -44,7 +48,13 @@ const getFlattenedFieldNames = (fields: Field[], prefix: string = ''): string[]
} }
if (fieldAffectsData(field)) { if (fieldAffectsData(field)) {
return [...fieldsToUse, `${fieldPrefix?.replace('.', '_') || ''}${field.name}`] return [
...fieldsToUse,
{
name: `${fieldPrefix}${field.name}`,
localized: field.localized,
},
]
} }
return fieldsToUse return fieldsToUse
@@ -56,22 +66,30 @@ export const validateExistingBlockIsIdentical = ({
localized, localized,
rootTableName, rootTableName,
table, table,
tableLocales,
}: Args): void => { }: Args): void => {
const fieldNames = getFlattenedFieldNames(block.fields) const fieldNames = getFlattenedFieldNames(block.fields)
const missingField = const missingField =
// ensure every field from the config is in the matching table // ensure every field from the config is in the matching table
fieldNames.find((name) => Object.keys(table).indexOf(name) === -1) || fieldNames.find(({ name, localized }) => {
const fieldTable = localized && tableLocales ? tableLocales : table
return Object.keys(fieldTable).indexOf(name) === -1
}) ||
// ensure every table column is matched for every field from the config // ensure every table column is matched for every field from the config
Object.keys(table).find((fieldName) => { Object.keys(table).find((fieldName) => {
if (!['_locale', '_order', '_parentID', '_path', '_uuid'].includes(fieldName)) { if (!['_locale', '_order', '_parentID', '_path', '_uuid'].includes(fieldName)) {
return fieldNames.indexOf(fieldName) === -1 return fieldNames.findIndex((field) => field.name === fieldName) === -1
} }
}) })
if (missingField) { if (missingField) {
throw new InvalidConfiguration( throw new InvalidConfiguration(
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One block includes the field ${missingField}, while the other block does not.`, `The table ${rootTableName} has multiple blocks with slug ${
block.slug
}, but the schemas do not match. One block includes the field ${
typeof missingField === 'string' ? missingField : missingField.name
}, while the other block does not.`,
) )
} }

View File

@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp

View File

@@ -0,0 +1,7 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
parserOptions: {
project: ['./tsconfig.json'],
tsconfigRootDir: __dirname,
},
}

View File

@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp

View File

@@ -0,0 +1,15 @@
{
"$schema": "https://json.schemastore.org/swcrc",
"sourceMaps": true,
"jsc": {
"target": "esnext",
"parser": {
"syntax": "typescript",
"tsx": true,
"dts": true
}
},
"module": {
"type": "es6"
}
}

View File

@@ -0,0 +1,22 @@
MIT License
Copyright (c) 2018-2022 Payload CMS, LLC <info@payloadcms.com>
Portions Copyright (c) Meta Platforms, Inc. and affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1 @@
# Nodemailer Email Adapter

View File

@@ -0,0 +1,59 @@
{
"name": "@payloadcms/email-nodemailer",
"version": "3.0.0-beta.13",
"description": "Payload Nodemailer Email Adapter",
"repository": {
"type": "git",
"url": "https://github.com/payloadcms/payload.git",
"directory": "packages/email-nodemailer"
},
"license": "MIT",
"homepage": "https://payloadcms.com",
"author": "Payload CMS, Inc.",
"main": "./src/index.ts",
"types": "./src/index.ts",
"type": "module",
"scripts": {
"build": "pnpm build:swc && pnpm build:types",
"build:swc": "swc ./src -d ./dist --config-file .swcrc",
"build:types": "tsc --emitDeclarationOnly --outDir dist",
"build:clean": "find . \\( -type d \\( -name build -o -name dist -o -name .cache \\) -o -type f -name tsconfig.tsbuildinfo \\) -exec rm -rf {} + && pnpm build",
"clean": "rimraf {dist,*.tsbuildinfo}",
"prepublishOnly": "pnpm clean && pnpm turbo build"
},
"dependencies": {
"nodemailer": "6.9.10"
},
"peerDependencies": {
"payload": "workspace:*"
},
"exports": {
".": {
"import": "./src/index.ts",
"require": "./src/index.ts",
"types": "./src/index.ts"
}
},
"publishConfig": {
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"main": "./dist/index.js",
"registry": "https://registry.npmjs.org/",
"types": "./dist/index.d.ts"
},
"engines": {
"node": ">=18.20.2"
},
"files": [
"dist"
],
"devDependencies": {
"payload": "workspace:*",
"@types/nodemailer": "6.4.14"
}
}

View File

@@ -0,0 +1,123 @@
/* eslint-disable no-console */
import type { Transporter } from 'nodemailer'
import type SMTPConnection from 'nodemailer/lib/smtp-connection'
import type { EmailAdapter } from 'payload/config'
import nodemailer from 'nodemailer'
import { InvalidConfiguration } from 'payload/errors'
export type NodemailerAdapterArgs = {
defaultFromAddress: string
defaultFromName: string
skipVerify?: boolean
transport?: Transporter
transportOptions?: SMTPConnection.Options
}
type NodemailerAdapter = EmailAdapter<unknown>
/**
* Creates an email adapter using nodemailer
*
* If no email configuration is provided, an ethereal email test account is returned
*/
export const nodemailerAdapter = async (
args?: NodemailerAdapterArgs,
): Promise<NodemailerAdapter> => {
const { defaultFromAddress, defaultFromName, transport } = await buildEmail(args)
const adapter: NodemailerAdapter = () => ({
defaultFromAddress,
defaultFromName,
sendEmail: async (message) => {
return await transport.sendMail({
from: `${defaultFromName} <${defaultFromAddress}>`,
...message,
})
},
})
return adapter
}
async function buildEmail(emailConfig?: NodemailerAdapterArgs): Promise<{
defaultFromAddress: string
defaultFromName: string
transport: Transporter
}> {
if (!emailConfig) {
const transport = await createMockAccount(emailConfig)
if (!transport) throw new InvalidConfiguration('Unable to create Nodemailer test account.')
return {
defaultFromAddress: 'info@payloadcms.com',
defaultFromName: 'Payload',
transport,
}
}
// Create or extract transport
let transport: Transporter
if ('transport' in emailConfig && emailConfig.transport) {
;({ transport } = emailConfig)
} else if ('transportOptions' in emailConfig && emailConfig.transportOptions) {
transport = nodemailer.createTransport(emailConfig.transportOptions)
} else {
transport = await createMockAccount(emailConfig)
}
if (!emailConfig.skipVerify) {
await verifyTransport(transport)
}
return {
defaultFromAddress: emailConfig.defaultFromAddress,
defaultFromName: emailConfig.defaultFromName,
transport,
}
}
async function verifyTransport(transport: Transporter) {
try {
await transport.verify()
} catch (err: unknown) {
console.error({ err, msg: 'Error verifying Nodemailer transport.' })
}
}
/**
* Use ethereal.email to create a mock email account
*/
async function createMockAccount(emailConfig?: NodemailerAdapterArgs) {
try {
const etherealAccount = await nodemailer.createTestAccount()
const smtpOptions = {
...(emailConfig || {}),
auth: {
pass: etherealAccount.pass,
user: etherealAccount.user,
},
fromAddress: emailConfig?.defaultFromAddress,
fromName: emailConfig?.defaultFromName,
host: 'smtp.ethereal.email',
port: 587,
secure: false,
}
const transport = nodemailer.createTransport(smtpOptions)
const { pass, user, web } = etherealAccount
console.info('E-mail configured with ethereal.email test account. ')
console.info(`Log into mock email provider at ${web}`)
console.info(`Mock email account username: ${user}`)
console.info(`Mock email account password: ${pass}`)
return transport
} catch (err: unknown) {
if (err instanceof Error) {
console.error({ err, msg: 'There was a problem setting up the mock email handler' })
throw new InvalidConfiguration(
`Unable to create Nodemailer test account. Error: ${err.message}`,
)
}
throw new InvalidConfiguration('Unable to create Nodemailer test account.')
}
}
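A hedged usage sketch of the adapter above, assuming the standard Payload config shape (buildConfig with an email property), an ESM config file where top-level await is available, and placeholder SMTP values; other required config options are elided:

import { nodemailerAdapter } from '@payloadcms/email-nodemailer'
import { buildConfig } from 'payload/config'

export default buildConfig({
  // ...collections, secret, db adapter, etc.
  email: await nodemailerAdapter({
    defaultFromAddress: 'info@example.com',
    defaultFromName: 'Example',
    // Omit transport and transportOptions entirely to fall back to an ethereal.email test account.
    transportOptions: {
      host: 'smtp.example.com',
      port: 587,
      auth: { user: 'smtp-user', pass: 'smtp-pass' },
    },
  }),
})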

View File

@@ -0,0 +1,19 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true, // Make sure typescript knows that this module depends on their references
"noEmit": false /* Do not emit outputs. */,
"emitDeclarationOnly": true,
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
"rootDir": "./src" /* Specify the root folder within your source files. */,
"strict": true,
},
"exclude": [
"dist",
"node_modules",
],
"include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.d.ts", "src/**/*.json"],
"references": [
{ "path": "../payload" },
]
}

View File

@@ -1,6 +1,6 @@
{ {
"name": "@payloadcms/graphql", "name": "@payloadcms/graphql",
"version": "3.0.0-beta.10", "version": "3.0.0-beta.13",
"main": "./src/index.ts", "main": "./src/index.ts",
"types": "./src/index.d.ts", "types": "./src/index.d.ts",
"type": "module", "type": "module",
@@ -27,20 +27,17 @@
"devDependencies": { "devDependencies": {
"@payloadcms/eslint-config": "workspace:*", "@payloadcms/eslint-config": "workspace:*",
"@types/pluralize": "^0.0.33", "@types/pluralize": "^0.0.33",
"graphql-http": "^1.22.0",
"payload": "workspace:*", "payload": "workspace:*",
"ts-essentials": "7.0.3" "ts-essentials": "7.0.3"
}, },
"dependencies": { "dependencies": {
"graphql": "16.8.1",
"graphql-http": "^1.22.0",
"graphql-playground-html": "1.6.30",
"graphql-query-complexity": "0.12.0",
"graphql-scalars": "1.22.2", "graphql-scalars": "1.22.2",
"graphql-type-json": "0.3.2",
"pluralize": "8.0.0" "pluralize": "8.0.0"
}, },
"peerDependencies": { "peerDependencies": {
"payload": "workspace:*" "payload": "workspace:*",
"graphql": "^16.8.1"
}, },
"publishConfig": { "publishConfig": {
"main": "./dist/index.js", "main": "./dist/index.js",

View File

@@ -4,12 +4,12 @@ import type { GraphQLInfo } from 'payload/config'
import type { SanitizedConfig } from 'payload/types' import type { SanitizedConfig } from 'payload/types'
import * as GraphQL from 'graphql' import * as GraphQL from 'graphql'
import { import {
createComplexityRule, createComplexityRule,
fieldExtensionsEstimator, fieldExtensionsEstimator,
simpleEstimator, simpleEstimator,
} from 'graphql-query-complexity' } from './packages/graphql-query-complexity/index.js'
import accessResolver from './resolvers/auth/access.js' import accessResolver from './resolvers/auth/access.js'
import buildFallbackLocaleInputType from './schema/buildFallbackLocaleInputType.js' import buildFallbackLocaleInputType from './schema/buildFallbackLocaleInputType.js'
import buildLocaleInputType from './schema/buildLocaleInputType.js' import buildLocaleInputType from './schema/buildLocaleInputType.js'
@@ -18,10 +18,10 @@ import initCollections from './schema/initCollections.js'
import initGlobals from './schema/initGlobals.js' import initGlobals from './schema/initGlobals.js'
import { wrapCustomFields } from './utilities/wrapCustomResolver.js' import { wrapCustomFields } from './utilities/wrapCustomResolver.js'
export async function configToSchema(config: SanitizedConfig): Promise<{ export function configToSchema(config: SanitizedConfig): {
schema: GraphQL.GraphQLSchema schema: GraphQL.GraphQLSchema
validationRules: (args: OperationArgs<any>) => GraphQL.ValidationRule[] validationRules: (args: OperationArgs<any>) => GraphQL.ValidationRule[]
}> { } {
const collections = config.collections.reduce((acc, collection) => { const collections = config.collections.reduce((acc, collection) => {
acc[collection.slug] = { acc[collection.slug] = {
config: collection, config: collection,

View File

@@ -0,0 +1,455 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable @typescript-eslint/no-unsafe-enum-comparison */
/**
* Created by Ivo Meißner on 28.07.17.
*/
import type {
DocumentNode,
FieldNode,
FragmentDefinitionNode,
FragmentSpreadNode,
GraphQLCompositeType,
GraphQLDirective,
GraphQLField,
GraphQLFieldMap,
GraphQLNamedType,
GraphQLSchema,
GraphQLUnionType,
InlineFragmentNode,
  OperationDefinitionNode,
} from 'graphql'
import {
GraphQLError,
GraphQLInterfaceType,
GraphQLObjectType,
Kind,
TypeInfo,
ValidationContext,
getNamedType,
isAbstractType,
isCompositeType,
visit,
visitWithTypeInfo,
} from 'graphql'
import {
getArgumentValues,
getDirectiveValues,
getVariableValues,
} from 'graphql/execution/values.js'
export type ComplexityEstimatorArgs = {
args: { [key: string]: any }
childComplexity: number
context?: Record<string, any>
field: GraphQLField<any, any>
node: FieldNode
type: GraphQLCompositeType
}
export type ComplexityEstimator = (options: ComplexityEstimatorArgs) => number | void
// Complexity can be anything that is supported by the configured estimators
export type Complexity = any
// Map of complexities for possible types (of Union, Interface types)
type ComplexityMap = {
[typeName: string]: number
}
export interface QueryComplexityOptions {
  // Pass request context to the estimators via estimationContext
  context?: Record<string, any>
  // Optional function to create a custom error
  createError?: (max: number, actual: number) => GraphQLError
  // An array of complexity estimators to use for estimating the complexity
  estimators: Array<ComplexityEstimator>
  // The maximum allowed query complexity, queries above this threshold will be rejected
  maximumComplexity: number
  // Optional callback function to retrieve the determined query complexity
  // Will be invoked whether the query is rejected or not
  // This can be used for logging or to implement rate limiting
  onComplete?: (complexity: number) => void
  // Specify an operation name only when passing multi-operation documents
  operationName?: string
  // The query variables. This is needed because the variables are not available
  // in the visitor of the graphql-js library
  variables?: Record<string, any>
}
function queryComplexityMessage(max: number, actual: number): string {
return `The query exceeds the maximum complexity of ${max}. ` + `Actual complexity is ${actual}`
}
export function getComplexity(options: {
context?: Record<string, any>
estimators: ComplexityEstimator[]
operationName?: string
query: DocumentNode
schema: GraphQLSchema
variables?: Record<string, any>
}): number {
const typeInfo = new TypeInfo(options.schema)
const errors: GraphQLError[] = []
const context = new ValidationContext(options.schema, options.query, typeInfo, (error) =>
errors.push(error),
)
const visitor = new QueryComplexity(context, {
// Maximum complexity does not matter since we're only interested in the calculated complexity.
context: options.context,
estimators: options.estimators,
maximumComplexity: Infinity,
operationName: options.operationName,
variables: options.variables,
})
visit(options.query, visitWithTypeInfo(typeInfo, visitor))
// Throw first error if any
if (errors.length) {
throw errors.pop()
}
return visitor.complexity
}
export default class QueryComplexity {
OperationDefinition: Record<string, any>
complexity: number
context: ValidationContext
estimators: Array<ComplexityEstimator>
includeDirectiveDef: GraphQLDirective
options: QueryComplexityOptions
requestContext?: Record<string, any>
skipDirectiveDef: GraphQLDirective
variableValues: Record<string, any>
constructor(context: ValidationContext, options: QueryComplexityOptions) {
if (!(typeof options.maximumComplexity === 'number' && options.maximumComplexity > 0)) {
throw new Error('Maximum query complexity must be a positive number')
}
this.context = context
this.complexity = 0
this.options = options
this.includeDirectiveDef = this.context.getSchema().getDirective('include')
this.skipDirectiveDef = this.context.getSchema().getDirective('skip')
this.estimators = options.estimators
this.variableValues = {}
this.requestContext = options.context
this.OperationDefinition = {
enter: this.onOperationDefinitionEnter,
leave: this.onOperationDefinitionLeave,
}
}
createError(): GraphQLError {
if (typeof this.options.createError === 'function') {
return this.options.createError(this.options.maximumComplexity, this.complexity)
}
return new GraphQLError(queryComplexityMessage(this.options.maximumComplexity, this.complexity))
}
nodeComplexity(
node: FieldNode | FragmentDefinitionNode | InlineFragmentNode | OperationDefinitionNode,
typeDef: GraphQLInterfaceType | GraphQLObjectType | GraphQLUnionType,
): number {
if (node.selectionSet) {
let fields: GraphQLFieldMap<any, any> = {}
if (typeDef instanceof GraphQLObjectType || typeDef instanceof GraphQLInterfaceType) {
fields = typeDef.getFields()
}
// Determine all possible types of the current node
let possibleTypeNames: string[]
if (isAbstractType(typeDef)) {
possibleTypeNames = this.context
.getSchema()
.getPossibleTypes(typeDef)
.map((t) => t.name)
} else {
possibleTypeNames = [typeDef.name]
}
// Collect complexities for all possible types individually
const selectionSetComplexities: ComplexityMap = node.selectionSet.selections.reduce(
(
complexities: ComplexityMap,
childNode: FieldNode | FragmentSpreadNode | InlineFragmentNode,
): ComplexityMap => {
// let nodeComplexity = 0;
let innerComplexities = complexities
let includeNode = true
let skipNode = false
for (const directive of childNode.directives ?? []) {
const directiveName = directive.name.value
switch (directiveName) {
case 'include': {
const values = getDirectiveValues(
this.includeDirectiveDef,
childNode,
this.variableValues || {},
)
if (typeof values.if === 'boolean') {
includeNode = values.if
}
break
}
case 'skip': {
const values = getDirectiveValues(
this.skipDirectiveDef,
childNode,
this.variableValues || {},
)
if (typeof values.if === 'boolean') {
skipNode = values.if
}
break
}
}
}
if (!includeNode || skipNode) {
return complexities
}
switch (childNode.kind) {
case Kind.FIELD: {
const field = fields[childNode.name.value]
// Invalid field, should be caught by other validation rules
if (!field) {
break
}
const fieldType = getNamedType(field.type)
// Get arguments
let args: { [key: string]: any }
try {
args = getArgumentValues(field, childNode, this.variableValues || {})
} catch (e) {
this.context.reportError(e)
return complexities
}
// Check if we have child complexity
let childComplexity = 0
if (isCompositeType(fieldType)) {
childComplexity = this.nodeComplexity(childNode, fieldType)
}
// Run estimators one after another and return first valid complexity
// score
const estimatorArgs: ComplexityEstimatorArgs = {
type: typeDef,
args,
childComplexity,
context: this.requestContext,
field,
node: childNode,
}
const validScore = this.estimators.find((estimator) => {
const tmpComplexity = estimator(estimatorArgs)
if (typeof tmpComplexity === 'number' && !isNaN(tmpComplexity)) {
innerComplexities = addComplexities(
tmpComplexity,
complexities,
possibleTypeNames,
)
return true
}
return false
})
if (!validScore) {
this.context.reportError(
new GraphQLError(
`No complexity could be calculated for field ${typeDef.name}.${field.name}. ` +
'At least one complexity estimator has to return a complexity score.',
),
)
return complexities
}
break
}
case Kind.FRAGMENT_SPREAD: {
const fragment = this.context.getFragment(childNode.name.value)
// Unknown fragment, should be caught by other validation rules
if (!fragment) {
break
}
const fragmentType = this.context
.getSchema()
.getType(fragment.typeCondition.name.value)
// Invalid fragment type, ignore. Should be caught by other validation rules
if (!isCompositeType(fragmentType)) {
break
}
const nodeComplexity = this.nodeComplexity(fragment, fragmentType)
if (isAbstractType(fragmentType)) {
// Add fragment complexity for all possible types
innerComplexities = addComplexities(
nodeComplexity,
complexities,
this.context
.getSchema()
.getPossibleTypes(fragmentType)
.map((t) => t.name),
)
} else {
// Add complexity for object type
innerComplexities = addComplexities(nodeComplexity, complexities, [
fragmentType.name,
])
}
break
}
case Kind.INLINE_FRAGMENT: {
let inlineFragmentType: GraphQLNamedType = typeDef
if (childNode.typeCondition && childNode.typeCondition.name) {
inlineFragmentType = this.context
.getSchema()
.getType(childNode.typeCondition.name.value)
if (!isCompositeType(inlineFragmentType)) {
break
}
}
const nodeComplexity = this.nodeComplexity(childNode, inlineFragmentType)
if (isAbstractType(inlineFragmentType)) {
// Add fragment complexity for all possible types
innerComplexities = addComplexities(
nodeComplexity,
complexities,
this.context
.getSchema()
.getPossibleTypes(inlineFragmentType)
.map((t) => t.name),
)
} else {
// Add complexity for object type
innerComplexities = addComplexities(nodeComplexity, complexities, [
inlineFragmentType.name,
])
}
break
}
default: {
innerComplexities = addComplexities(
this.nodeComplexity(childNode, typeDef),
complexities,
possibleTypeNames,
)
break
}
}
return innerComplexities
},
{},
)
// Only return max complexity of all possible types
if (!selectionSetComplexities) {
return NaN
}
return Math.max(...Object.values(selectionSetComplexities), 0)
}
return 0
}
onOperationDefinitionEnter(operation: OperationDefinitionNode): void {
if (
typeof this.options.operationName === 'string' &&
this.options.operationName !== operation.name.value
) {
return
}
// Get variable values from variables that are passed from options, merged
// with default values defined in the operation
const { coerced, errors } = getVariableValues(
this.context.getSchema(),
// We have to create a new array here because input argument is not readonly in graphql ~14.6.0
operation.variableDefinitions ? [...operation.variableDefinitions] : [],
this.options.variables ?? {},
)
if (errors && errors.length) {
// We have input validation errors, report errors and abort
errors.forEach((error) => this.context.reportError(error))
return
}
this.variableValues = coerced
switch (operation.operation) {
case 'query':
this.complexity += this.nodeComplexity(operation, this.context.getSchema().getQueryType())
break
case 'mutation':
this.complexity += this.nodeComplexity(
operation,
this.context.getSchema().getMutationType(),
)
break
case 'subscription':
this.complexity += this.nodeComplexity(
operation,
this.context.getSchema().getSubscriptionType(),
)
break
default:
throw new Error(
`Query complexity could not be calculated for operation of type ${operation.operation}`,
)
}
}
onOperationDefinitionLeave(operation: OperationDefinitionNode): GraphQLError | void {
if (
typeof this.options.operationName === 'string' &&
this.options.operationName !== operation.name.value
) {
return
}
if (this.options.onComplete) {
this.options.onComplete(this.complexity)
}
if (this.complexity > this.options.maximumComplexity) {
return this.context.reportError(this.createError())
}
}
}
/**
* Adds a complexity to the complexity map for all possible types
* @param complexity
* @param complexityMap
* @param possibleTypes
*/
function addComplexities(
complexity: number,
complexityMap: ComplexityMap,
possibleTypes: string[],
): ComplexityMap {
for (const type of possibleTypes) {
if (Object.prototype.hasOwnProperty.call(complexityMap, type)) {
complexityMap[type] += complexity
} else {
complexityMap[type] = complexity
}
}
return complexityMap
}

View File

@@ -0,0 +1,13 @@
import type { ValidationContext } from 'graphql'
import type { QueryComplexityOptions } from './QueryComplexity.js'
import QueryComplexity from './QueryComplexity.js'
export function createComplexityRule(
options: QueryComplexityOptions,
): (context: ValidationContext) => QueryComplexity {
return (context: ValidationContext): QueryComplexity => {
return new QueryComplexity(context, options)
}
}

View File

@@ -0,0 +1,14 @@
import type { ComplexityEstimator, ComplexityEstimatorArgs } from '../../QueryComplexity.js'
export const fieldExtensionsEstimator = (): ComplexityEstimator => {
return (args: ComplexityEstimatorArgs): number | void => {
if (args.field.extensions) {
// Calculate complexity score
if (typeof args.field.extensions.complexity === 'number') {
return args.childComplexity + args.field.extensions.complexity
} else if (typeof args.field.extensions.complexity === 'function') {
return args.field.extensions.complexity(args)
}
}
}
}

View File

@@ -0,0 +1,9 @@
import type { ComplexityEstimator, ComplexityEstimatorArgs } from '../../QueryComplexity.js'
export const simpleEstimator = (options?: { defaultComplexity?: number }): ComplexityEstimator => {
const defaultComplexity =
options && typeof options.defaultComplexity === 'number' ? options.defaultComplexity : 1
return (args: ComplexityEstimatorArgs): number | void => {
return defaultComplexity + args.childComplexity
}
}

View File

@@ -0,0 +1,3 @@
export { createComplexityRule } from './createComplexityRule.js'
export { fieldExtensionsEstimator } from './estimators/fieldExtensions/index.js'
export { simpleEstimator } from './estimators/simple/index.js'
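For orientation, a hedged sketch of how these vendored helpers fit together as a standard GraphQL validation rule; the schema and query are throwaway placeholders, and the cast reflects that the visitor class is not typed as an ASTVisitor:

import { buildSchema, parse, validate, type ValidationRule } from 'graphql'
import { createComplexityRule, fieldExtensionsEstimator, simpleEstimator } from './index.js'

const schema = buildSchema('type Query { hello: String }')
const document = parse('{ hello }')

// Reject any operation whose estimated complexity exceeds 1000.
const complexityRule = createComplexityRule({
  estimators: [
    fieldExtensionsEstimator(), // prefer per-field `extensions.complexity` values
    simpleEstimator({ defaultComplexity: 1 }), // otherwise cost 1 per field
  ],
  maximumComplexity: 1000,
  onComplete: (complexity) => console.log(`complexity: ${complexity}`),
}) as unknown as ValidationRule

const errors = validate(schema, document, [complexityRule])
console.log(errors.length === 0 ? 'ok' : errors)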

View File

@@ -0,0 +1,73 @@
import { GraphQLScalarType } from 'graphql'
import { Kind, print } from 'graphql/language'
function identity(value) {
return value
}
function ensureObject(value) {
if (typeof value !== 'object' || value === null || Array.isArray(value)) {
throw new TypeError(`JSONObject cannot represent non-object value: ${value}`)
}
return value
}
function parseObject(typeName, ast, variables) {
const value = Object.create(null)
ast.fields.forEach((field) => {
// eslint-disable-next-line no-use-before-define
value[field.name.value] = parseLiteral(typeName, field.value, variables)
})
return value
}
function parseLiteral(typeName, ast, variables) {
switch (ast.kind) {
case Kind.STRING:
case Kind.BOOLEAN:
return ast.value
case Kind.INT:
case Kind.FLOAT:
return parseFloat(ast.value)
case Kind.OBJECT:
return parseObject(typeName, ast, variables)
case Kind.LIST:
return ast.values.map((n) => parseLiteral(typeName, n, variables))
case Kind.NULL:
return null
case Kind.VARIABLE:
return variables ? variables[ast.name.value] : undefined
default:
throw new TypeError(`${typeName} cannot represent value: ${print(ast)}`)
}
}
// This named export is intended for users of CommonJS. Users of ES modules
// should instead use the default export.
export const GraphQLJSON = new GraphQLScalarType({
name: 'JSON',
description:
'The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).',
parseLiteral: (ast, variables) => parseLiteral('JSON', ast, variables),
parseValue: identity,
serialize: identity,
specifiedByURL: 'http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf',
})
export const GraphQLJSONObject = new GraphQLScalarType({
name: 'JSONObject',
description:
'The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).',
parseLiteral: (ast, variables) => {
if (ast.kind !== Kind.OBJECT) {
throw new TypeError(`JSONObject cannot represent non-object value: ${print(ast)}`)
}
return parseObject('JSONObject', ast, variables)
},
parseValue: ensureObject,
serialize: ensureObject,
specifiedByURL: 'http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf',
})
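A small, hedged sketch of how these scalars are consumed as ordinary field types elsewhere in this package; the Example type, its fields, and the relative import path are placeholders:

import { GraphQLObjectType, GraphQLString } from 'graphql'
import { GraphQLJSON, GraphQLJSONObject } from './index.js'

const Example = new GraphQLObjectType({
  name: 'Example',
  fields: {
    slug: { type: GraphQLString },
    data: { type: GraphQLJSON }, // any JSON value: scalars, arrays, objects, null
    meta: { type: GraphQLJSONObject }, // plain objects only; other values are rejected
  },
})

export default Example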

View File

@@ -0,0 +1,41 @@
import type { PayloadRequest, Where } from 'payload/types'
import type { Collection } from 'payload/types'
import { countOperation } from 'payload/operations'
import { isolateObjectProperty } from 'payload/utilities'
import type { Context } from '../types.js'
export type Resolver = (
_: unknown,
args: {
data: Record<string, unknown>
locale?: string
where?: Where
},
context: {
req: PayloadRequest
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
) => Promise<{ totalDocs: number }>
export default function countResolver(collection: Collection): Resolver {
return async function resolver(_, args, context: Context) {
let { req } = context
const locale = req.locale
const fallbackLocale = req.fallbackLocale
req = isolateObjectProperty(req, 'locale')
req = isolateObjectProperty(req, 'fallbackLocale')
req.locale = args.locale || locale
req.fallbackLocale = fallbackLocale
const options = {
collection,
req: isolateObjectProperty(req, 'transactionID'),
where: args.where,
}
const results = await countOperation(options)
return results
}
}
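For reference, a hedged sketch of the query shape this resolver serves once initCollections registers a count{PluralName} field; the Posts name and the where filter are placeholders:

// The generated field returns only { totalDocs }.
const COUNT_POSTS = /* GraphQL */ `
  query {
    countPosts(where: { title: { like: "payload" } }) {
      totalDocs
    }
  }
`

export default COUNT_POSTS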

View File

@@ -1,4 +1,3 @@
/* eslint-disable no-use-before-define */
import type { GraphQLInputFieldConfig, GraphQLScalarType, GraphQLType } from 'graphql' import type { GraphQLInputFieldConfig, GraphQLScalarType, GraphQLType } from 'graphql'
import type { GraphQLInfo } from 'payload/config' import type { GraphQLInfo } from 'payload/config'
import type { import type {
@@ -37,11 +36,11 @@ import {
GraphQLNonNull, GraphQLNonNull,
GraphQLString, GraphQLString,
} from 'graphql' } from 'graphql'
import { GraphQLJSON } from 'graphql-type-json'
import { fieldAffectsData, optionIsObject, tabHasName } from 'payload/types' import { fieldAffectsData, optionIsObject, tabHasName } from 'payload/types'
import { toWords } from 'payload/utilities' import { toWords } from 'payload/utilities'
import { flattenTopLevelFields } from 'payload/utilities' import { flattenTopLevelFields } from 'payload/utilities'
import { GraphQLJSON } from '../packages/graphql-type-json/index.js'
import combineParentName from '../utilities/combineParentName.js' import combineParentName from '../utilities/combineParentName.js'
import formatName from '../utilities/formatName.js' import formatName from '../utilities/formatName.js'
import { groupOrTabHasRequiredSubfield } from '../utilities/groupOrTabHasRequiredSubfield.js' import { groupOrTabHasRequiredSubfield } from '../utilities/groupOrTabHasRequiredSubfield.js'

View File

@@ -41,13 +41,12 @@ import {
GraphQLUnionType, GraphQLUnionType,
} from 'graphql' } from 'graphql'
import { DateTimeResolver, EmailAddressResolver } from 'graphql-scalars' import { DateTimeResolver, EmailAddressResolver } from 'graphql-scalars'
/* eslint-disable no-use-before-define */
import { GraphQLJSON } from 'graphql-type-json'
import { tabHasName } from 'payload/types' import { tabHasName } from 'payload/types'
import { toWords } from 'payload/utilities' import { toWords } from 'payload/utilities'
import type { Context } from '../resolvers/types.js' import type { Context } from '../resolvers/types.js'
import { GraphQLJSON } from '../packages/graphql-type-json/index.js'
import combineParentName from '../utilities/combineParentName.js' import combineParentName from '../utilities/combineParentName.js'
import formatName from '../utilities/formatName.js' import formatName from '../utilities/formatName.js'
import formatOptions from '../utilities/formatOptions.js' import formatOptions from '../utilities/formatOptions.js'
@@ -301,7 +300,7 @@ function buildObjectType({
value: { value: {
type: new GraphQLUnionType({ type: new GraphQLUnionType({
name: relationshipName, name: relationshipName,
async resolveType(data, { req }) { resolveType(data, { req }) {
return graphqlResult.collections[data.collection].graphQL.type.name return graphqlResult.collections[data.collection].graphQL.type.name
}, },
types, types,

View File

@@ -8,9 +8,9 @@ import type {
} from 'payload/types' } from 'payload/types'
import { GraphQLBoolean, GraphQLNonNull, GraphQLObjectType } from 'graphql' import { GraphQLBoolean, GraphQLNonNull, GraphQLObjectType } from 'graphql'
import { GraphQLJSONObject } from 'graphql-type-json'
import { toWords } from 'payload/utilities' import { toWords } from 'payload/utilities'
import { GraphQLJSONObject } from '../packages/graphql-type-json/index.js'
import formatName from '../utilities/formatName.js' import formatName from '../utilities/formatName.js'
type OperationType = 'create' | 'delete' | 'read' | 'readVersions' | 'unlock' | 'update' type OperationType = 'create' | 'delete' | 'read' | 'readVersions' | 'unlock' | 'update'

View File

@@ -1,7 +1,6 @@
/* eslint-disable @typescript-eslint/no-use-before-define */ /* eslint-disable @typescript-eslint/no-use-before-define */
import type { Field, FieldAffectingData } from 'payload/types' import type { Field, FieldAffectingData } from 'payload/types'
/* eslint-disable no-use-before-define */
import { GraphQLInputObjectType, GraphQLList } from 'graphql' import { GraphQLInputObjectType, GraphQLList } from 'graphql'
import { fieldAffectsData, fieldHasSubFields, fieldIsPresentationalOnly } from 'payload/types' import { fieldAffectsData, fieldHasSubFields, fieldIsPresentationalOnly } from 'payload/types'
import { flattenTopLevelFields } from 'payload/utilities' import { flattenTopLevelFields } from 'payload/utilities'

View File

@@ -21,16 +21,13 @@ import type {
} from 'payload/types' } from 'payload/types'
import { GraphQLEnumType, GraphQLInputObjectType } from 'graphql' import { GraphQLEnumType, GraphQLInputObjectType } from 'graphql'
import GraphQLJSONImport from 'graphql-type-json'
import { GraphQLJSON } from '../packages/graphql-type-json/index.js'
import combineParentName from '../utilities/combineParentName.js' import combineParentName from '../utilities/combineParentName.js'
import formatName from '../utilities/formatName.js' import formatName from '../utilities/formatName.js'
import recursivelyBuildNestedPaths from './recursivelyBuildNestedPaths.js' import recursivelyBuildNestedPaths from './recursivelyBuildNestedPaths.js'
import { withOperators } from './withOperators.js' import { withOperators } from './withOperators.js'
const GraphQLJSON = (GraphQLJSONImport ||
GraphQLJSONImport.default) as unknown as typeof GraphQLJSONImport.default
type Args = { type Args = {
nestedFieldName?: string nestedFieldName?: string
parentName: string parentName: string

View File

@@ -24,6 +24,7 @@ import refresh from '../resolvers/auth/refresh.js'
import resetPassword from '../resolvers/auth/resetPassword.js' import resetPassword from '../resolvers/auth/resetPassword.js'
import unlock from '../resolvers/auth/unlock.js' import unlock from '../resolvers/auth/unlock.js'
import verifyEmail from '../resolvers/auth/verifyEmail.js' import verifyEmail from '../resolvers/auth/verifyEmail.js'
import countResolver from '../resolvers/collections/count.js'
import createResolver from '../resolvers/collections/create.js' import createResolver from '../resolvers/collections/create.js'
import getDeleteResolver from '../resolvers/collections/delete.js' import getDeleteResolver from '../resolvers/collections/delete.js'
import { docAccessResolver } from '../resolvers/collections/docAccess.js' import { docAccessResolver } from '../resolvers/collections/docAccess.js'
@@ -183,6 +184,25 @@ function initCollectionsGraphQL({ config, graphqlResult }: InitCollectionsGraphQ
resolve: findResolver(collection), resolve: findResolver(collection),
} }
graphqlResult.Query.fields[`count${pluralName}`] = {
type: new GraphQLObjectType({
name: `count${pluralName}`,
fields: {
totalDocs: { type: GraphQLInt },
},
}),
args: {
draft: { type: GraphQLBoolean },
where: { type: collection.graphQL.whereInputType },
...(config.localization
? {
locale: { type: graphqlResult.types.localeInputType },
}
: {}),
},
resolve: countResolver(collection),
}
graphqlResult.Query.fields[`docAccess${singularName}`] = { graphqlResult.Query.fields[`docAccess${singularName}`] = {
type: buildPolicyType({ type: buildPolicyType({
type: 'collection', type: 'collection',

View File

@@ -11,9 +11,9 @@ import {
GraphQLString, GraphQLString,
} from 'graphql' } from 'graphql'
import { DateTimeResolver, EmailAddressResolver } from 'graphql-scalars' import { DateTimeResolver, EmailAddressResolver } from 'graphql-scalars'
import { GraphQLJSON } from 'graphql-type-json'
import { optionIsObject } from 'payload/types' import { optionIsObject } from 'payload/types'
import { GraphQLJSON } from '../packages/graphql-type-json/index.js'
import combineParentName from '../utilities/combineParentName.js' import combineParentName from '../utilities/combineParentName.js'
import formatName from '../utilities/formatName.js' import formatName from '../utilities/formatName.js'
import operators from './operators.js' import operators from './operators.js'

View File

@@ -6,7 +6,6 @@
"emitDeclarationOnly": true, "emitDeclarationOnly": true,
"outDir": "./dist" /* Specify an output folder for all emitted files. */, "outDir": "./dist" /* Specify an output folder for all emitted files. */,
"rootDir": "./src" /* Specify the root folder within your source files. */, "rootDir": "./src" /* Specify the root folder within your source files. */,
"allowImportingTsExtensions": true
}, },
"exclude": [ "exclude": [
"dist", "dist",

View File

@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp

View File

@@ -0,0 +1,37 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
extends: ['@payloadcms'],
overrides: [
{
extends: ['plugin:@typescript-eslint/disable-type-checked'],
files: ['*.js', '*.cjs', '*.json', '*.md', '*.yml', '*.yaml'],
},
{
files: ['package.json', 'tsconfig.json'],
rules: {
'perfectionist/sort-array-includes': 'off',
'perfectionist/sort-astro-attributes': 'off',
'perfectionist/sort-classes': 'off',
'perfectionist/sort-enums': 'off',
'perfectionist/sort-exports': 'off',
'perfectionist/sort-imports': 'off',
'perfectionist/sort-interfaces': 'off',
'perfectionist/sort-jsx-props': 'off',
'perfectionist/sort-keys': 'off',
'perfectionist/sort-maps': 'off',
'perfectionist/sort-named-exports': 'off',
'perfectionist/sort-named-imports': 'off',
'perfectionist/sort-object-types': 'off',
'perfectionist/sort-objects': 'off',
'perfectionist/sort-svelte-attributes': 'off',
'perfectionist/sort-union-types': 'off',
'perfectionist/sort-vue-attributes': 'off',
},
},
],
parserOptions: {
project: ['./tsconfig.json'],
tsconfigRootDir: __dirname,
},
root: true,
}

View File

@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp

View File

@@ -0,0 +1,15 @@
{
"$schema": "https://json.schemastore.org/swcrc",
"sourceMaps": "inline",
"jsc": {
"target": "esnext",
"parser": {
"syntax": "typescript",
"tsx": true,
"dts": true
}
},
"module": {
"type": "commonjs"
}
}

View File

@@ -0,0 +1,56 @@
{
"name": "@payloadcms/live-preview-vue",
"version": "0.1.0",
"description": "The official live preview Vue SDK for Payload",
"repository": {
"type": "git",
"url": "https://github.com/payloadcms/payload.git",
"directory": "packages/live-preview-vue"
},
"license": "MIT",
"homepage": "https://payloadcms.com",
"author": "Payload CMS, Inc.",
"main": "./src/index.ts",
"types": "./src/index.ts",
"type": "module",
"scripts": {
"build": "pnpm copyfiles && pnpm build:swc && pnpm build:types",
"build:swc": "swc ./src -d ./dist --config-file .swcrc",
"build:types": "tsc --emitDeclarationOnly --outDir dist",
"clean": "rimraf {dist,*.tsbuildinfo}",
"copyfiles": "copyfiles -u 1 \"src/**/*.{html,css,scss,ttf,woff,woff2,eot,svg,jpg,png,json}\" dist/",
"prepublishOnly": "pnpm clean && pnpm turbo build"
},
"dependencies": {
"@payloadcms/live-preview": "workspace:^0.x"
},
"devDependencies": {
"@payloadcms/eslint-config": "workspace:*",
"vue": "^3.0.0",
"payload": "workspace:*"
},
"peerDependencies": {
"vue": "^3.0.0"
},
"exports": {
".": {
"default": "./src/index.ts",
"types": "./src/index.ts"
}
},
"publishConfig": {
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"main": "./dist/index.js",
"registry": "https://registry.npmjs.org/",
"types": "./dist/index.d.ts"
},
"files": [
"dist"
]
}

View File

@@ -0,0 +1,58 @@
import type { Ref } from 'vue'
import { ready, subscribe, unsubscribe } from '@payloadcms/live-preview'
import { onMounted, onUnmounted, ref } from 'vue'
/**
* Vue composable to implement Payload CMS Live Preview.
*
* {@link https://payloadcms.com/docs/live-preview/frontend View the documentation}
*/
export const useLivePreview = <T>(props: {
apiRoute?: string
depth?: number
initialData: T
serverURL: string
}): {
data: Ref<T>
isLoading: Ref<boolean>
} => {
const { apiRoute, depth, initialData, serverURL } = props
const data = ref(initialData) as Ref<T>
const isLoading = ref(true)
const hasSentReadyMessage = ref(false)
const onChange = (mergedData: T) => {
data.value = mergedData
isLoading.value = false
}
let subscription: (event: MessageEvent) => void
onMounted(() => {
subscription = subscribe({
apiRoute,
callback: onChange,
depth,
initialData,
serverURL,
})
if (!hasSentReadyMessage.value) {
hasSentReadyMessage.value = true
ready({
serverURL,
})
}
})
onUnmounted(() => {
unsubscribe(subscription)
})
return {
data,
isLoading,
}
}
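A hedged usage sketch of the composable above, meant to run inside a component's setup(); the Page type and serverURL are placeholders for your own generated types and Payload server:

import { useLivePreview } from '@payloadcms/live-preview-vue'

// Placeholder document shape; in practice use the type generated for your collection.
type Page = { id: string; title: string }

export function usePagePreview(initialPage: Page) {
  const { data, isLoading } = useLivePreview<Page>({
    depth: 2,
    initialData: initialPage,
    serverURL: 'https://payload.example.com',
  })

  // `data` stays in sync with edits made in the admin panel's live preview window.
  return { isLoading, page: data }
}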

View File

@@ -0,0 +1,25 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true, // Make sure typescript knows that this module depends on their references
"noEmit": false /* Do not emit outputs. */,
"emitDeclarationOnly": true,
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
"rootDir": "./src" /* Specify the root folder within your source files. */,
"jsx": "react"
},
"exclude": [
"dist",
"build",
"tests",
"test",
"node_modules",
".eslintrc.js",
"src/**/*.spec.js",
"src/**/*.spec.jsx",
"src/**/*.spec.ts",
"src/**/*.spec.tsx"
],
"include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.d.ts", "src/**/*.json"],
"references": [{ "path": "../payload" }] // db-mongodb depends on payload
}

View File

@@ -1,6 +1,6 @@
{ {
"name": "@payloadcms/next", "name": "@payloadcms/next",
"version": "3.0.0-beta.10", "version": "3.0.0-beta.13",
"main": "./src/index.js", "main": "./src/index.js",
"types": "./src/index.js", "types": "./src/index.js",
"type": "module", "type": "module",
@@ -46,6 +46,8 @@
"@types/ws": "^8.5.10", "@types/ws": "^8.5.10",
"css-loader": "^6.10.0", "css-loader": "^6.10.0",
"css-minimizer-webpack-plugin": "^6.0.0", "css-minimizer-webpack-plugin": "^6.0.0",
"file-type": "16.5.4",
"graphql-http": "^1.22.0",
"mini-css-extract-plugin": "1.6.2", "mini-css-extract-plugin": "1.6.2",
"payload": "workspace:*", "payload": "workspace:*",
"postcss-loader": "^8.1.1", "postcss-loader": "^8.1.1",
@@ -65,8 +67,6 @@
"@types/busboy": "^1.5.3", "@types/busboy": "^1.5.3",
"busboy": "^1.6.0", "busboy": "^1.6.0",
"deep-equal": "2.2.2", "deep-equal": "2.2.2",
"graphql": "16.8.1",
"graphql-http": "^1.22.0",
"graphql-playground-html": "1.6.30", "graphql-playground-html": "1.6.30",
"path-to-regexp": "^6.2.1", "path-to-regexp": "^6.2.1",
"qs": "6.11.2", "qs": "6.11.2",
@@ -76,9 +76,11 @@
"ws": "^8.16.0" "ws": "^8.16.0"
}, },
"peerDependencies": { "peerDependencies": {
"file-type": "16.5.4",
"http-status": "1.6.2", "http-status": "1.6.2",
"next": "^14.2.0-canary.23", "next": "^14.3.0-canary.7",
"payload": "workspace:*" "payload": "workspace:*",
"graphql": "^16.8.1"
}, },
"publishConfig": { "publishConfig": {
"main": "./dist/index.js", "main": "./dist/index.js",

View File

@@ -3,6 +3,7 @@ export { GRAPHQL_PLAYGROUND_GET, GRAPHQL_POST } from '../routes/graphql/index.js
export { export {
DELETE as REST_DELETE, DELETE as REST_DELETE,
GET as REST_GET, GET as REST_GET,
OPTIONS as REST_OPTIONS,
PATCH as REST_PATCH, PATCH as REST_PATCH,
POST as REST_POST, POST as REST_POST,
} from '../routes/rest/index.js' } from '../routes/rest/index.js'

View File

@@ -1,3 +1,5 @@
export { traverseFields } from '../utilities/buildFieldSchemaMap/traverseFields.js' export { traverseFields } from '../utilities/buildFieldSchemaMap/traverseFields.js'
export { createPayloadRequest } from '../utilities/createPayloadRequest.js'
export { getNextRequestI18n } from '../utilities/getNextRequestI18n.js' export { getNextRequestI18n } from '../utilities/getNextRequestI18n.js'
export { getPayloadHMR } from '../utilities/getPayloadHMR.js' export { getPayloadHMR, reload } from '../utilities/getPayloadHMR.js'
export { headersWithCors } from '../utilities/headersWithCors.js'

View File

@@ -77,7 +77,7 @@ export const tempFileHandler: Handler = (options, fieldname, filename) => {
} }
export const memHandler: Handler = (options, fieldname, filename) => { export const memHandler: Handler = (options, fieldname, filename) => {
const buffers = [] const buffers: Buffer[] = []
const hash = crypto.createHash('md5') const hash = crypto.createHash('md5')
let fileSize = 0 let fileSize = 0
let completed = false let completed = false

View File

@@ -1,4 +1,4 @@
const ACCEPTABLE_CONTENT_TYPE = /^multipart\/['"()+-_]+(?:; ?['"()+-_]*)+$/i const ACCEPTABLE_CONTENT_TYPE = /multipart\/['"()+-_]+(?:; ?['"()+-_]*)+$/i
const UNACCEPTABLE_METHODS = new Set(['GET', 'HEAD', 'DELETE', 'OPTIONS', 'CONNECT', 'TRACE']) const UNACCEPTABLE_METHODS = new Set(['GET', 'HEAD', 'DELETE', 'OPTIONS', 'CONNECT', 'TRACE'])
const hasBody = (req: Request): boolean => { const hasBody = (req: Request): boolean => {

View File

@@ -1,4 +1,5 @@
import Busboy from 'busboy' import Busboy from 'busboy'
import httpStatus from 'http-status'
import { APIError } from 'payload/errors' import { APIError } from 'payload/errors'
import type { NextFileUploadOptions, NextFileUploadResponse } from './index.js' import type { NextFileUploadOptions, NextFileUploadResponse } from './index.js'
@@ -17,6 +18,17 @@ type ProcessMultipart = (args: {
}) => Promise<NextFileUploadResponse> }) => Promise<NextFileUploadResponse>
export const processMultipart: ProcessMultipart = async ({ options, request }) => { export const processMultipart: ProcessMultipart = async ({ options, request }) => {
let parsingRequest = true let parsingRequest = true
let fileCount = 0
let filesCompleted = 0
let allFilesHaveResolved: (value?: unknown) => void
let failedResolvingFiles: (err: Error) => void
const allFilesComplete = new Promise((res, rej) => {
allFilesHaveResolved = res
failedResolvingFiles = rej
})
const result: NextFileUploadResponse = { const result: NextFileUploadResponse = {
fields: undefined, fields: undefined,
files: undefined, files: undefined,
@@ -36,6 +48,7 @@ export const processMultipart: ProcessMultipart = async ({ options, request }) =
// Build req.files fields // Build req.files fields
busboy.on('file', (field, file, info) => { busboy.on('file', (field, file, info) => {
fileCount += 1
// Parse file name(cutting huge names, decoding, etc..). // Parse file name(cutting huge names, decoding, etc..).
const { encoding, filename: name, mimeType: mime } = info const { encoding, filename: name, mimeType: mime } = info
const filename = parseFileName(options, name) const filename = parseFileName(options, name)
@@ -73,7 +86,9 @@ export const processMultipart: ProcessMultipart = async ({ options, request }) =
debugLog(options, `Aborting upload because of size limit ${field}->${filename}.`) debugLog(options, `Aborting upload because of size limit ${field}->${filename}.`)
cleanup() cleanup()
parsingRequest = false parsingRequest = false
throw new APIError(options.responseOnLimit, 413, { size: getFileSize() }) throw new APIError(options.responseOnLimit, httpStatus.REQUEST_ENTITY_TOO_LARGE, {
size: getFileSize(),
})
} }
}) })
@@ -95,6 +110,8 @@ export const processMultipart: ProcessMultipart = async ({ options, request }) =
return debugLog(options, `Don't add file instance if original name and size are empty`) return debugLog(options, `Don't add file instance if original name and size are empty`)
} }
filesCompleted += 1
result.files = buildFields( result.files = buildFields(
result.files, result.files,
field, field,
@@ -117,19 +134,25 @@ export const processMultipart: ProcessMultipart = async ({ options, request }) =
request[waitFlushProperty] = [] request[waitFlushProperty] = []
} }
request[waitFlushProperty].push(writePromise) request[waitFlushProperty].push(writePromise)
if (filesCompleted === fileCount) {
allFilesHaveResolved()
}
}) })
file.on('error', (err) => { file.on('error', (err) => {
uploadTimer.clear() uploadTimer.clear()
debugLog(options, `File Error: ${err.message}`) debugLog(options, `File Error: ${err.message}`)
cleanup() cleanup()
failedResolvingFiles(err)
}) })
// Start upload process.
debugLog(options, `New upload started ${field}->${filename}, bytes:${getFileSize()}`) debugLog(options, `New upload started ${field}->${filename}, bytes:${getFileSize()}`)
uploadTimer.set() uploadTimer.set()
}) })
busboy.on('finish', () => { busboy.on('finish', async () => {
debugLog(options, `Busboy finished parsing request.`) debugLog(options, `Busboy finished parsing request.`)
if (options.parseNested) { if (options.parseNested) {
result.fields = processNested(result.fields) result.fields = processNested(result.fields)
@@ -137,20 +160,27 @@ export const processMultipart: ProcessMultipart = async ({ options, request }) =
} }
if (request[waitFlushProperty]) { if (request[waitFlushProperty]) {
Promise.all(request[waitFlushProperty]).then(() => {
  delete request[waitFlushProperty]
})
try {
  await Promise.all(request[waitFlushProperty]).then(() => {
    delete request[waitFlushProperty]
  })
} catch (err) {
  debugLog(options, `Error waiting for file write promises: ${err}`)
}
} }
return result
}) })
busboy.on('error', (err) => { busboy.on('error', (err) => {
debugLog(options, `Busboy error`) debugLog(options, `Busboy error`)
parsingRequest = false parsingRequest = false
throw new APIError('Busboy error parsing multipart request', 500) throw new APIError('Busboy error parsing multipart request', httpStatus.BAD_REQUEST)
}) })
const reader = request.body.getReader() const reader = request.body.getReader()
// Start parsing request
while (parsingRequest) { while (parsingRequest) {
const { done, value } = await reader.read() const { done, value } = await reader.read()
@@ -163,5 +193,7 @@ export const processMultipart: ProcessMultipart = async ({ options, request }) =
} }
} }
if (fileCount !== 0) await allFilesComplete
return result return result
} }
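The counters added above (fileCount, filesCompleted) together with the allFilesComplete promise keep processMultipart from returning before every file stream has flushed. A minimal sketch of the underlying deferred-promise pattern, with illustrative names only; the real handler settles it from busboy's file callbacks:

// Sketch: capture resolve/reject outside the executor so event callbacks that fire
// later can settle the promise. This is the shape used by allFilesComplete above.
function createDeferred(): {
  promise: Promise<void>
  reject: (err: Error) => void
  resolve: () => void
} {
  let resolve!: () => void
  let reject!: (err: Error) => void
  const promise = new Promise<void>((res, rej) => {
    resolve = res
    reject = rej
  })
  return { promise, reject, resolve }
}

// In the handler: each busboy 'file' event bumps fileCount, each finished write bumps
// filesCompleted, file errors call reject, and the request only returns after
// `if (fileCount !== 0) await deferred.promise` - so zero-file requests resolve immediately.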

View File

@@ -19,7 +19,7 @@ let tempCounter = 0
export const debugLog = (options: NextFileUploadOptions, msg: string) => { export const debugLog = (options: NextFileUploadOptions, msg: string) => {
const opts = options || {} const opts = options || {}
if (!opts.debug) return false if (!opts.debug) return false
console.log(`Express-file-upload: ${msg}`) // eslint-disable-line console.log(`Next-file-upload: ${msg}`) // eslint-disable-line
return true return true
} }
@@ -287,8 +287,9 @@ export const parseFileName: ParseFileName = (opts, fileName) => {
? opts.safeFileNames ? opts.safeFileNames
: SAFE_FILE_NAME_REGEX : SAFE_FILE_NAME_REGEX
// Parse file name extension. // Parse file name extension.
let { name, extension } = parseFileNameExtension(opts.preserveExtension, parsedName)
if (extension.length) extension = '.' + extension.replace(nameRegex, '')
return name.replace(nameRegex, '').concat(extension)
const parsedFileName = parseFileNameExtension(opts.preserveExtension, parsedName)
if (parsedFileName.extension.length)
  parsedFileName.extension = '.' + parsedFileName.extension.replace(nameRegex, '')
return parsedFileName.name.replace(nameRegex, '').concat(parsedFileName.extension)
} }
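The parseFileName change above only alters how the parsed name and extension are held (a single parsedFileName object instead of reassigned destructured bindings); the sanitization itself is unchanged. A rough, self-contained illustration of that behavior, assuming a safe-name regex that strips anything outside word characters and dashes (the real SAFE_FILE_NAME_REGEX is defined elsewhere in this file and may differ):

// Illustrative only: the actual regex and the preserveExtension handling live in the
// utilities module this hunk is taken from.
const SAFE_FILE_NAME_REGEX = /[^\w-]+/g // assumption for this example

const sanitizeFileName = (name: string, extension: string): string => {
  const ext = extension.length ? '.' + extension.replace(SAFE_FILE_NAME_REGEX, '') : ''
  return name.replace(SAFE_FILE_NAME_REGEX, '').concat(ext)
}

sanitizeFileName('my photo (1)', 'png') // => 'myphoto1.png'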

View File

@@ -1,5 +1,4 @@
import type { GraphQLFormattedError } from 'graphql' import type { GraphQLError, GraphQLFormattedError } from 'graphql'
import type { GraphQLError } from 'graphql'
import type { CollectionAfterErrorHook, Payload, SanitizedConfig } from 'payload/types' import type { CollectionAfterErrorHook, Payload, SanitizedConfig } from 'payload/types'
import { configToSchema } from '@payloadcms/graphql' import { configToSchema } from '@payloadcms/graphql'
@@ -7,12 +6,14 @@ import { createHandler } from 'graphql-http/lib/use/fetch'
import httpStatus from 'http-status' import httpStatus from 'http-status'
import { createPayloadRequest } from '../../utilities/createPayloadRequest.js' import { createPayloadRequest } from '../../utilities/createPayloadRequest.js'
import { headersWithCors } from '../../utilities/headersWithCors.js'
const handleError = async ( const handleError = async (
payload: Payload, payload: Payload,
err: any, err: any,
debug: boolean, debug: boolean,
afterErrorHook: CollectionAfterErrorHook, afterErrorHook: CollectionAfterErrorHook,
// eslint-disable-next-line @typescript-eslint/require-await
): Promise<GraphQLFormattedError> => { ): Promise<GraphQLFormattedError> => {
const status = err.originalError.status || httpStatus.INTERNAL_SERVER_ERROR const status = err.originalError.status || httpStatus.INTERNAL_SERVER_ERROR
let errorMessage = err.message let errorMessage = err.message
@@ -37,7 +38,7 @@ const handleError = async (
} }
if (afterErrorHook) { if (afterErrorHook) {
;({ response } = (await afterErrorHook(err, response, null, null)) || { response }) ;({ response } = afterErrorHook(err, response, null, null) || { response })
} }
return response return response
@@ -60,9 +61,10 @@ export const getGraphql = async (config: Promise<SanitizedConfig> | SanitizedCon
} }
if (!cached.promise) { if (!cached.promise) {
// eslint-disable-next-line no-async-promise-executor
cached.promise = new Promise(async (resolve) => { cached.promise = new Promise(async (resolve) => {
const resolvedConfig = await config const resolvedConfig = await config
const schema = await configToSchema(resolvedConfig) const schema = configToSchema(resolvedConfig)
resolve(schema) resolve(schema)
}) })
} }
@@ -118,13 +120,17 @@ export const POST =
validationRules: (request, args, defaultRules) => defaultRules.concat(validationRules(args)), validationRules: (request, args, defaultRules) => defaultRules.concat(validationRules(args)),
})(originalRequest) })(originalRequest)
const resHeaders = new Headers(apiResponse.headers)
const resHeaders = headersWithCors({
  headers: new Headers(apiResponse.headers),
  req,
})
for (const key in headers) { for (const key in headers) {
resHeaders.append(key, headers[key]) resHeaders.append(key, headers[key])
} }
return new Response(apiResponse.body, { return new Response(apiResponse.body, {
headers: new Headers(resHeaders), headers: resHeaders,
status: apiResponse.status, status: apiResponse.status,
}) })
} }
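headersWithCors is imported here and reused by nearly every REST handler further down, but its implementation is not part of this diff. A plausible sketch of what such a utility does, offered purely as an assumption (the real ../../utilities/headersWithCors.js may differ):

import type { PayloadRequest } from 'payload/types'

// Assumed shape: derive CORS headers from the request and the Payload config, merge
// them into an existing Headers instance, and hand that same instance back so one
// call can serve every Response a handler builds.
export const headersWithCors = ({
  headers,
  req,
}: {
  headers: Headers
  req: PayloadRequest
}): Headers => {
  const origin = req.headers.get('Origin')
  const allowed = req.payload.config.cors // '*' or a list of origins in the config

  if (allowed === '*') {
    headers.set('Access-Control-Allow-Origin', '*')
  } else if (origin && Array.isArray(allowed) && allowed.includes(origin)) {
    headers.set('Access-Control-Allow-Origin', origin)
    headers.set('Access-Control-Allow-Credentials', 'true')
  }

  return headers
}

Building the headers once per request and passing the same object to each early return keeps the CORS behaviour of these endpoints consistent no matter which branch produces the response.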

View File

@@ -3,12 +3,18 @@ import { accessOperation } from 'payload/operations'
import type { BaseRouteHandler } from '../types.js' import type { BaseRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const access: BaseRouteHandler = async ({ req }) => { export const access: BaseRouteHandler = async ({ req }) => {
const results = await accessOperation({ const results = await accessOperation({
req, req,
}) })
return Response.json(results, { return Response.json(results, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

View File

@@ -3,7 +3,10 @@ import { forgotPasswordOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const forgotPassword: CollectionRouteHandler = async ({ collection, req }) => { export const forgotPassword: CollectionRouteHandler = async ({ collection, req }) => {
const { t } = req
await forgotPasswordOperation({ await forgotPasswordOperation({
collection, collection,
data: { data: {
@@ -16,10 +19,13 @@ export const forgotPassword: CollectionRouteHandler = async ({ collection, req }
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('general:success'),
message: 'Success',
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )
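This handler, and those that follow, swap hard-coded English strings marked // TODO(translate) for req.t lookups into Payload's translation namespaces. A minimal sketch of the pattern; the 'general:success' key comes straight from this diff, the helper name is illustrative:

import httpStatus from 'http-status'

import type { PayloadRequest } from 'payload/types'

// req.t resolves a namespaced key against the requester's locale, so API messages
// follow the user's language instead of always returning English.
export const successResponse = (req: PayloadRequest): Response =>
  Response.json({ message: req.t('general:success') }, { status: httpStatus.OK })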

View File

@@ -2,11 +2,21 @@ import { initOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const init: CollectionRouteHandler = async ({ collection, req }) => { export const init: CollectionRouteHandler = async ({ collection, req }) => {
const initialized = await initOperation({ const initialized = await initOperation({
collection: collection.config.slug, collection: collection.config.slug,
req, req,
}) })
return Response.json({ initialized }) return Response.json(
{ initialized },
{
headers: headersWithCors({
headers: new Headers(),
req,
}),
},
)
} }

View File

@@ -5,8 +5,10 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const login: CollectionRouteHandler = async ({ collection, req }) => { export const login: CollectionRouteHandler = async ({ collection, req }) => {
const { searchParams } = req const { searchParams, t } = req
const depth = searchParams.get('depth') const depth = searchParams.get('depth')
const result = await loginOperation({ const result = await loginOperation({
@@ -31,13 +33,15 @@ export const login: CollectionRouteHandler = async ({ collection, req }) => {
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('authentication:passed'),
message: 'Auth Passed',
...result, ...result,
}, },
{ {
headers: new Headers({
  'Set-Cookie': cookie,
}),
headers: headersWithCors({
  headers: new Headers({
    'Set-Cookie': cookie,
  }),
  req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
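login (like refresh, resetPassword and registerFirstUser below) also has to return the auth cookie, so Set-Cookie is carried on the same Headers object that headersWithCors produces. A small sketch of that composition; the cookie string itself comes from generatePayloadCookie, whose arguments are not shown in this hunk, so the literal below is only a placeholder:

// Attach a pre-serialized auth cookie to headers that already carry the CORS values.
const attachAuthCookie = (headers: Headers, cookie: string): Headers => {
  headers.set('Set-Cookie', cookie)
  return headers
}

const headers = attachAuthCookie(new Headers(), 'payload-token=example; Path=/; HttpOnly')
// ...then: Response.json(result, { headers, status: httpStatus.OK })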

View File

@@ -4,18 +4,27 @@ import { logoutOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const logout: CollectionRouteHandler = async ({ collection, req }) => { export const logout: CollectionRouteHandler = async ({ collection, req }) => {
const { t } = req
const result = await logoutOperation({ const result = await logoutOperation({
collection, collection,
req, req,
}) })
const headers = headersWithCors({
headers: new Headers(),
req,
})
if (!result) { if (!result) {
return Response.json( return Response.json(
{ {
message: 'Logout failed.', message: t('error:logoutFailed'),
}, },
{ {
headers,
status: httpStatus.BAD_REQUEST, status: httpStatus.BAD_REQUEST,
}, },
) )
@@ -26,15 +35,14 @@ export const logout: CollectionRouteHandler = async ({ collection, req }) => {
payload: req.payload, payload: req.payload,
}) })
headers.set('Set-Cookie', expiredCookie)
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('authentication:logoutSuccessful'),
message: 'Logout successful.',
}, },
{ {
headers: new Headers({ headers,
'Set-Cookie': expiredCookie,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -4,6 +4,8 @@ import { meOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const me: CollectionRouteHandler = async ({ collection, req }) => { export const me: CollectionRouteHandler = async ({ collection, req }) => {
const currentToken = extractJWT(req) const currentToken = extractJWT(req)
@@ -23,6 +25,10 @@ export const me: CollectionRouteHandler = async ({ collection, req }) => {
message: req.t('authentication:account'), message: req.t('authentication:account'),
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -5,16 +5,24 @@ import { refreshOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const refresh: CollectionRouteHandler = async ({ collection, req }) => { export const refresh: CollectionRouteHandler = async ({ collection, req }) => {
const { t } = req
const token = typeof req.data?.token === 'string' ? req.data.token : extractJWT(req) const token = typeof req.data?.token === 'string' ? req.data.token : extractJWT(req)
const headers = headersWithCors({
headers: new Headers(),
req,
})
if (!token) { if (!token) {
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('error:tokenNotProvided'),
message: 'Token not provided.',
}, },
{ {
headers,
status: httpStatus.UNAUTHORIZED, status: httpStatus.UNAUTHORIZED,
}, },
) )
@@ -36,16 +44,15 @@ export const refresh: CollectionRouteHandler = async ({ collection, req }) => {
delete result.refreshedToken delete result.refreshedToken
} }
headers.set('Set-Cookie', cookie)
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('authentication:tokenRefreshSuccessful'),
message: 'Token refresh successful',
...result, ...result,
}, },
{ {
headers: new Headers({ headers,
'Set-Cookie': cookie,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -1,11 +1,23 @@
import httpStatus from 'http-status' import httpStatus from 'http-status'
import { generatePayloadCookie } from 'payload/auth' import { generatePayloadCookie } from 'payload/auth'
import { ValidationError } from 'payload/errors'
import { registerFirstUserOperation } from 'payload/operations' import { registerFirstUserOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const registerFirstUser: CollectionRouteHandler = async ({ collection, req }) => { export const registerFirstUser: CollectionRouteHandler = async ({ collection, req }) => {
const data = req.data const { data, t } = req
if (data?.password !== data['confirm-password']) {
throw new ValidationError([
{
field: 'confirm-password',
message: req.t('Password and confirm password fields do not match.'),
},
])
}
const result = await registerFirstUserOperation({ const result = await registerFirstUserOperation({
collection, collection,
@@ -26,14 +38,16 @@ export const registerFirstUser: CollectionRouteHandler = async ({ collection, re
return Response.json( return Response.json(
{ {
exp: result.exp, exp: result.exp,
// TODO(translate) message: t('authentication:successfullyRegisteredFirstUser'),
message: 'Successfully registered first user.',
token: result.token, token: result.token,
user: result.user, user: result.user,
}, },
{ {
headers: new Headers({
  'Set-Cookie': cookie,
}),
headers: headersWithCors({
  headers: new Headers({
    'Set-Cookie': cookie,
  }),
  req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
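Besides the headersWithCors and req.t changes, registerFirstUser now verifies that the two password fields agree before running the operation. The same guard, pulled out as a standalone helper for readability; behavior matches the inline check above:

import { ValidationError } from 'payload/errors'

// Raises the same field-level error as the handler above when confirm-password
// does not match password.
export const assertPasswordsMatch = (data: Record<string, unknown>): void => {
  if (data?.password !== data['confirm-password']) {
    throw new ValidationError([
      {
        field: 'confirm-password',
        message: 'Password and confirm password fields do not match.',
      },
    ])
  }
}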

View File

@@ -4,8 +4,10 @@ import { resetPasswordOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const resetPassword: CollectionRouteHandler = async ({ collection, req }) => { export const resetPassword: CollectionRouteHandler = async ({ collection, req }) => {
const { searchParams } = req const { searchParams, t } = req
const depth = searchParams.get('depth') const depth = searchParams.get('depth')
const result = await resetPasswordOperation({ const result = await resetPasswordOperation({
@@ -30,13 +32,15 @@ export const resetPassword: CollectionRouteHandler = async ({ collection, req })
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('authentication:passwordResetSuccessfully'),
message: 'Password reset successfully.',
...result, ...result,
}, },
{ {
headers: new Headers({
  'Set-Cookie': cookie,
}),
headers: headersWithCors({
  headers: new Headers({
    'Set-Cookie': cookie,
  }),
  req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },

View File

@@ -3,7 +3,11 @@ import { unlockOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const unlock: CollectionRouteHandler = async ({ collection, req }) => { export const unlock: CollectionRouteHandler = async ({ collection, req }) => {
const { t } = req
await unlockOperation({ await unlockOperation({
collection, collection,
data: { email: req.data.email as string }, data: { email: req.data.email as string },
@@ -12,10 +16,13 @@ export const unlock: CollectionRouteHandler = async ({ collection, req }) => {
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('general:success'),
message: 'Success',
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -3,7 +3,10 @@ import { verifyEmailOperation } from 'payload/operations'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const verifyEmail: CollectionRouteHandlerWithID = async ({ id, collection, req }) => { export const verifyEmail: CollectionRouteHandlerWithID = async ({ id, collection, req }) => {
const { t } = req
await verifyEmailOperation({ await verifyEmailOperation({
collection, collection,
req, req,
@@ -12,10 +15,13 @@ export const verifyEmail: CollectionRouteHandlerWithID = async ({ id, collection
return Response.json( return Response.json(
{ {
// TODO(translate) message: t('authentication:emailVerified'),
message: 'Email verified successfully.',
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -8,6 +8,7 @@ import httpStatus from 'http-status'
import type { FieldSchemaMap } from '../../utilities/buildFieldSchemaMap/types.js' import type { FieldSchemaMap } from '../../utilities/buildFieldSchemaMap/types.js'
import { buildFieldSchemaMap } from '../../utilities/buildFieldSchemaMap/index.js' import { buildFieldSchemaMap } from '../../utilities/buildFieldSchemaMap/index.js'
import { headersWithCors } from '../../utilities/headersWithCors.js'
let cached = global._payload_fieldSchemaMap let cached = global._payload_fieldSchemaMap
@@ -27,6 +28,11 @@ export const getFieldSchemaMap = (req: PayloadRequest): FieldSchemaMap => {
} }
export const buildFormState = async ({ req }: { req: PayloadRequest }) => { export const buildFormState = async ({ req }: { req: PayloadRequest }) => {
const headers = headersWithCors({
headers: new Headers(),
req,
})
try { try {
const reqData: BuildFormStateArgs = req.data as BuildFormStateArgs const reqData: BuildFormStateArgs = req.data as BuildFormStateArgs
const { collectionSlug, formState, globalSlug, locale, operation, schemaPath } = reqData const { collectionSlug, formState, globalSlug, locale, operation, schemaPath } = reqData
@@ -44,17 +50,20 @@ export const buildFormState = async ({ req }: { req: PayloadRequest }) => {
if (!canAccessAdmin) { if (!canAccessAdmin) {
return Response.json(null, { return Response.json(null, {
headers,
status: httpStatus.UNAUTHORIZED, status: httpStatus.UNAUTHORIZED,
}) })
} }
// Match the user collection to the global admin config // Match the user collection to the global admin config
} else if (adminUserSlug !== incomingUserSlug) { } else if (adminUserSlug !== incomingUserSlug) {
return Response.json(null, { return Response.json(null, {
headers,
status: httpStatus.UNAUTHORIZED, status: httpStatus.UNAUTHORIZED,
}) })
} }
} else { } else {
return Response.json(null, { return Response.json(null, {
headers,
status: httpStatus.UNAUTHORIZED, status: httpStatus.UNAUTHORIZED,
}) })
} }
@@ -84,6 +93,7 @@ export const buildFormState = async ({ req }: { req: PayloadRequest }) => {
message: 'Could not find field schema for given path', message: 'Could not find field schema for given path',
}, },
{ {
headers,
status: httpStatus.BAD_REQUEST, status: httpStatus.BAD_REQUEST,
}, },
) )
@@ -198,6 +208,7 @@ export const buildFormState = async ({ req }: { req: PayloadRequest }) => {
} }
return Response.json(result, { return Response.json(result, {
headers,
status: httpStatus.OK, status: httpStatus.OK,
}) })
} catch (err) { } catch (err) {
@@ -206,6 +217,7 @@ export const buildFormState = async ({ req }: { req: PayloadRequest }) => {
message: 'There was an error building form state', message: 'There was an error building form state',
}, },
{ {
headers,
status: httpStatus.BAD_REQUEST, status: httpStatus.BAD_REQUEST,
}, },
) )

View File

@@ -0,0 +1,22 @@
import type { Where } from 'payload/types'
import httpStatus from 'http-status'
import { countOperation } from 'payload/operations'
import type { CollectionRouteHandler } from '../types.js'
export const count: CollectionRouteHandler = async ({ collection, req }) => {
const { where } = req.query as {
where?: Where
}
const result = await countOperation({
collection,
req,
where,
})
return Response.json(result, {
status: httpStatus.OK,
})
}
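count is a brand-new handler. The route wiring is outside this diff, but a handler like this is typically exposed at /api/<collection-slug>/count with an optional qs-encoded where filter. A hedged usage sketch - the collection slug, base URL and the totalDocs response field are assumptions here:

import { stringify } from 'qs'

// Count published docs in a hypothetical 'posts' collection via the REST API.
export const getPublishedPostCount = async (): Promise<number> => {
  const query = stringify(
    { where: { _status: { equals: 'published' } } },
    { addQueryPrefix: true },
  )
  const res = await fetch(`http://localhost:3000/api/posts/count${query}`)
  const { totalDocs } = (await res.json()) as { totalDocs: number }
  return totalDocs
}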

View File

@@ -5,6 +5,8 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const create: CollectionRouteHandler = async ({ collection, req }) => { export const create: CollectionRouteHandler = async ({ collection, req }) => {
const { searchParams } = req const { searchParams } = req
const autosave = searchParams.get('autosave') === 'true' const autosave = searchParams.get('autosave') === 'true'
@@ -28,6 +30,10 @@ export const create: CollectionRouteHandler = async ({ collection, req }) => {
}), }),
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.CREATED, status: httpStatus.CREATED,
}, },
) )

View File

@@ -7,6 +7,8 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const deleteDoc: CollectionRouteHandler = async ({ collection, req }) => { export const deleteDoc: CollectionRouteHandler = async ({ collection, req }) => {
const { depth, where } = req.query as { const { depth, where } = req.query as {
depth?: string depth?: string
@@ -20,6 +22,11 @@ export const deleteDoc: CollectionRouteHandler = async ({ collection, req }) =>
where, where,
}) })
const headers = headersWithCors({
headers: new Headers(),
req,
})
if (result.errors.length === 0) { if (result.errors.length === 0) {
const message = req.t('general:deletedCountSuccessfully', { const message = req.t('general:deletedCountSuccessfully', {
count: result.docs.length, count: result.docs.length,
@@ -35,6 +42,7 @@ export const deleteDoc: CollectionRouteHandler = async ({ collection, req }) =>
message, message,
}, },
{ {
headers,
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )
@@ -54,6 +62,7 @@ export const deleteDoc: CollectionRouteHandler = async ({ collection, req }) =>
message, message,
}, },
{ {
headers,
status: httpStatus.BAD_REQUEST, status: httpStatus.BAD_REQUEST,
}, },
) )

View File

@@ -4,6 +4,7 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js' import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js'
export const deleteByID: CollectionRouteHandlerWithID = async ({ export const deleteByID: CollectionRouteHandlerWithID = async ({
@@ -27,12 +28,18 @@ export const deleteByID: CollectionRouteHandlerWithID = async ({
req, req,
}) })
const headers = headersWithCors({
headers: new Headers(),
req,
})
if (!doc) { if (!doc) {
return Response.json( return Response.json(
{ {
message: req.t('general:notFound'), message: req.t('general:notFound'),
}, },
{ {
headers,
status: httpStatus.NOT_FOUND, status: httpStatus.NOT_FOUND,
}, },
) )
@@ -44,6 +51,7 @@ export const deleteByID: CollectionRouteHandlerWithID = async ({
message: req.t('general:deletedSuccessfully'), message: req.t('general:deletedSuccessfully'),
}, },
{ {
headers,
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -3,6 +3,8 @@ import { docAccessOperation } from 'payload/operations'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const docAccess: CollectionRouteHandlerWithID = async ({ id, collection, req }) => { export const docAccess: CollectionRouteHandlerWithID = async ({ id, collection, req }) => {
const result = await docAccessOperation({ const result = await docAccessOperation({
id, id,
@@ -11,6 +13,10 @@ export const docAccess: CollectionRouteHandlerWithID = async ({ id, collection,
}) })
return Response.json(result, { return Response.json(result, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

View File

@@ -5,6 +5,7 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js' import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js'
export const duplicate: CollectionRouteHandlerWithID = async ({ export const duplicate: CollectionRouteHandlerWithID = async ({
@@ -41,6 +42,10 @@ export const duplicate: CollectionRouteHandlerWithID = async ({
message, message,
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -6,6 +6,8 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const find: CollectionRouteHandler = async ({ collection, req }) => { export const find: CollectionRouteHandler = async ({ collection, req }) => {
const { depth, draft, limit, page, sort, where } = req.query as { const { depth, draft, limit, page, sort, where } = req.query as {
depth?: string depth?: string
@@ -28,6 +30,10 @@ export const find: CollectionRouteHandler = async ({ collection, req }) => {
}) })
return Response.json(result, { return Response.json(result, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

View File

@@ -4,6 +4,7 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js' import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js'
export const findByID: CollectionRouteHandlerWithID = async ({ export const findByID: CollectionRouteHandlerWithID = async ({
@@ -29,6 +30,10 @@ export const findByID: CollectionRouteHandlerWithID = async ({
}) })
return Response.json(result, { return Response.json(result, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

View File

@@ -4,6 +4,7 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js' import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js'
export const findVersionByID: CollectionRouteHandlerWithID = async ({ export const findVersionByID: CollectionRouteHandlerWithID = async ({
@@ -28,6 +29,10 @@ export const findVersionByID: CollectionRouteHandlerWithID = async ({
}) })
return Response.json(result, { return Response.json(result, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

View File

@@ -6,6 +6,8 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const findVersions: CollectionRouteHandler = async ({ collection, req }) => { export const findVersions: CollectionRouteHandler = async ({ collection, req }) => {
const { depth, limit, page, sort, where } = req.query as { const { depth, limit, page, sort, where } = req.query as {
depth?: string depth?: string
@@ -26,6 +28,10 @@ export const findVersions: CollectionRouteHandler = async ({ collection, req })
}) })
return Response.json(result, { return Response.json(result, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

View File

@@ -5,6 +5,7 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { routeError } from '../routeError.js' import { routeError } from '../routeError.js'
export const preview: CollectionRouteHandlerWithID = async ({ id, collection, req }) => { export const preview: CollectionRouteHandlerWithID = async ({ id, collection, req }) => {
@@ -35,8 +36,9 @@ export const preview: CollectionRouteHandlerWithID = async ({ id, collection, re
token, token,
}) })
} catch (err) { } catch (err) {
routeError({ return routeError({
collection, collection,
config: req.payload.config,
err, err,
req, req,
}) })
@@ -44,6 +46,10 @@ export const preview: CollectionRouteHandlerWithID = async ({ id, collection, re
} }
return Response.json(previewURL, { return Response.json(previewURL, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

View File

@@ -4,6 +4,7 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js' import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js'
export const restoreVersion: CollectionRouteHandlerWithID = async ({ export const restoreVersion: CollectionRouteHandlerWithID = async ({
@@ -33,6 +34,10 @@ export const restoreVersion: CollectionRouteHandlerWithID = async ({
message: req.t('version:restoredSuccessfully'), message: req.t('version:restoredSuccessfully'),
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -7,6 +7,8 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandler } from '../types.js' import type { CollectionRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const update: CollectionRouteHandler = async ({ collection, req }) => { export const update: CollectionRouteHandler = async ({ collection, req }) => {
const { depth, draft, where } = req.query as { const { depth, draft, where } = req.query as {
depth?: string depth?: string
@@ -23,6 +25,11 @@ export const update: CollectionRouteHandler = async ({ collection, req }) => {
where, where,
}) })
const headers = headersWithCors({
headers: new Headers(),
req,
})
if (result.errors.length === 0) { if (result.errors.length === 0) {
const message = req.t('general:updatedCountSuccessfully', { const message = req.t('general:updatedCountSuccessfully', {
count: result.docs.length, count: result.docs.length,
@@ -38,6 +45,7 @@ export const update: CollectionRouteHandler = async ({ collection, req }) => {
message, message,
}, },
{ {
headers,
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )
@@ -56,6 +64,7 @@ export const update: CollectionRouteHandler = async ({ collection, req }) => {
message, message,
}, },
{ {
headers,
status: httpStatus.BAD_REQUEST, status: httpStatus.BAD_REQUEST,
}, },
) )

View File

@@ -4,6 +4,7 @@ import { isNumber } from 'payload/utilities'
import type { CollectionRouteHandlerWithID } from '../types.js' import type { CollectionRouteHandlerWithID } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js' import { sanitizeCollectionID } from '../utilities/sanitizeCollectionID.js'
export const updateByID: CollectionRouteHandlerWithID = async ({ export const updateByID: CollectionRouteHandlerWithID = async ({
@@ -43,6 +44,10 @@ export const updateByID: CollectionRouteHandlerWithID = async ({
message, message,
}, },
{ {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}, },
) )

View File

@@ -1,13 +1,16 @@
import type { Collection, PayloadRequest } from 'payload/types' import type { Collection, PayloadRequest } from 'payload/types'
import getFileType from 'file-type'
import fsPromises from 'fs/promises' import fsPromises from 'fs/promises'
import httpStatus from 'http-status' import httpStatus from 'http-status'
import path from 'path' import path from 'path'
import { APIError } from 'payload/errors' import { APIError } from 'payload/errors'
import { streamFile } from '../../../next-stream-file/index.js' import { streamFile } from '../../../next-stream-file/index.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
import { routeError } from '../routeError.js' import { routeError } from '../routeError.js'
import { checkFileAccess } from './checkFileAccess.js' import { checkFileAccess } from './checkFileAccess.js'
import { getFileTypeFallback } from './getFileTypeFallback.js'
// /:collectionSlug/file/:filename // /:collectionSlug/file/:filename
type Args = { type Args = {
@@ -24,13 +27,6 @@ export const getFile = async ({ collection, filename, req }: Args): Promise<Resp
) )
} }
if (collection.config.upload.disableLocalStorage && !collection.config.upload.handlers) {
throw new APIError(
`This collection has local storage disabled: ${collection.config.slug}`,
httpStatus.BAD_REQUEST,
)
}
await checkFileAccess({ await checkFileAccess({
collection, collection,
filename, filename,
@@ -48,25 +44,35 @@ export const getFile = async ({ collection, filename, req }: Args): Promise<Resp
}) })
} }
return response if (response instanceof Response) return response
} }
const fileDir = collection.config.upload?.staticDir || collection.config.slug const fileDir = collection.config.upload?.staticDir || collection.config.slug
const filePath = path.resolve(`${fileDir}/${filename}`) const filePath = path.resolve(`${fileDir}/${filename}`)
const stats = await fsPromises.stat(filePath) const stats = await fsPromises.stat(filePath)
const data = streamFile(filePath) const data = streamFile(filePath)
const headers = new Headers({
'Content-Length': stats.size + '',
})
const fileTypeResult = (await getFileType.fromFile(filePath)) || getFileTypeFallback(filePath)
headers.set('Content-Type', fileTypeResult.mime)
return new Response(data, { return new Response(data, {
headers: new Headers({
  'content-length': stats.size + '',
}),
headers: headersWithCors({
  headers,
  req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} catch (error) { } catch (err) {
return routeError({ return routeError({
collection, collection,
err: error, config: req.payload.config,
err,
req, req,
}) })
} }

View File

@@ -0,0 +1,28 @@
type ReturnType = {
ext: string
mime: string
}
const extensionMap: {
[ext: string]: string
} = {
css: 'text/css',
csv: 'text/csv',
htm: 'text/html',
html: 'text/html',
js: 'application/javascript',
json: 'application/json',
md: 'text/markdown',
svg: 'image/svg+xml',
xml: 'application/xml',
yml: 'application/x-yaml',
}
export const getFileTypeFallback = (path: string): ReturnType => {
const ext = path.split('.').pop() || 'txt'
return {
ext,
mime: extensionMap[ext] || 'text/plain',
}
}
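getFileTypeFallback backs up the file-type lookup added to getFile above: file-type sniffs magic bytes, so text-based formats (svg, css, json, md and friends) usually come back undefined and the extension map takes over. For example, with the map above:

getFileTypeFallback('uploads/logo.svg') // => { ext: 'svg', mime: 'image/svg+xml' }
getFileTypeFallback('uploads/notes.txt') // => { ext: 'txt', mime: 'text/plain' } (txt is not in the map, so it falls back)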

View File

@@ -3,6 +3,8 @@ import { docAccessOperationGlobal } from 'payload/operations'
import type { GlobalRouteHandler } from '../types.js' import type { GlobalRouteHandler } from '../types.js'
import { headersWithCors } from '../../../utilities/headersWithCors.js'
export const docAccess: GlobalRouteHandler = async ({ globalConfig, req }) => { export const docAccess: GlobalRouteHandler = async ({ globalConfig, req }) => {
const result = await docAccessOperationGlobal({ const result = await docAccessOperationGlobal({
globalConfig, globalConfig,
@@ -10,6 +12,10 @@ export const docAccess: GlobalRouteHandler = async ({ globalConfig, req }) => {
}) })
return Response.json(result, { return Response.json(result, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK, status: httpStatus.OK,
}) })
} }

Some files were not shown because too many files have changed in this diff.