Compare commits
10 Commits
feat/folde
...
feat/separ
| SHA1 |
| --- |
| 2b46de51b7 |
| b57451594f |
| 11d5eb3e60 |
| 6435b128e8 |
| f4b07c6b45 |
| 373655d544 |
| 5c3ba9f061 |
| e49e9c7b0d |
| 761a5fb5e9 |
| 52cf7f96c8 |
2  .github/actions/setup/action.yml  vendored
@@ -6,7 +6,7 @@ inputs:
node-version:
description: Node.js version
required: true
default: 23.11.0
default: 22.6.0
pnpm-version:
description: Pnpm version
required: true
14  .github/workflows/main.yml  vendored
@@ -16,7 +16,7 @@ concurrency:
cancel-in-progress: true

env:
NODE_VERSION: 23.11.0
NODE_VERSION: 22.6.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
@@ -62,6 +62,12 @@ jobs:
echo "templates: ${{ steps.filter.outputs.templates }}"

lint:
# Follows same github's ci skip: [skip lint], [lint skip], [no lint]
if: >
github.event_name == 'pull_request' &&
!contains(github.event.pull_request.title, '[skip lint]') &&
!contains(github.event.pull_request.title, '[lint skip]') &&
!contains(github.event.pull_request.title, '[no lint]')
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
@@ -75,8 +81,10 @@ jobs:
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}

- name: Lint
run: pnpm lint -- --quiet
- name: Lint staged
run: |
git diff --name-only --diff-filter=d origin/${GITHUB_BASE_REF}...${GITHUB_SHA}
npx lint-staged --diff="origin/${GITHUB_BASE_REF}...${GITHUB_SHA}"

build:
needs: changes
2  .github/workflows/post-release-templates.yml  vendored
@@ -7,7 +7,7 @@ on:
workflow_dispatch:

env:
NODE_VERSION: 23.11.0
NODE_VERSION: 22.6.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
2  .github/workflows/post-release.yml  vendored
@@ -12,7 +12,7 @@ on:
default: ''

env:
NODE_VERSION: 23.11.0
NODE_VERSION: 22.6.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
2  .github/workflows/publish-prerelease.yml  vendored
@@ -7,7 +7,7 @@ on:
workflow_dispatch:

env:
NODE_VERSION: 23.11.0
NODE_VERSION: 22.6.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
9  .idea/runConfigurations/_template__of_JavaScriptTestRunnerJest.xml  generated  Normal file
@@ -0,0 +1,9 @@
<component name="ProjectRunConfigurationManager">
<configuration default="true" type="JavaScriptTestRunnerJest">
<node-interpreter value="project" />
<node-options value="--no-deprecation" />
<envs />
<scope-kind value="ALL" />
<method v="2" />
</configuration>
</component>
@@ -1 +1 @@
v23.11.0
v22.6.0

@@ -1,2 +1,2 @@
pnpm 9.7.1
nodejs 23.11.0
nodejs 22.6.0
14  .vscode/launch.json  vendored
@@ -63,13 +63,6 @@
"request": "launch",
"type": "node-terminal"
},
{
"command": "pnpm tsx --no-deprecation test/dev.ts query-presets",
"cwd": "${workspaceFolder}",
"name": "Run Dev Query Presets",
"request": "launch",
"type": "node-terminal"
},
{
"command": "pnpm tsx --no-deprecation test/dev.ts login-with-username",
"cwd": "${workspaceFolder}",
@@ -118,13 +111,6 @@
"request": "launch",
"type": "node-terminal"
},
{
"command": "pnpm tsx --no-deprecation test/dev.ts folder-view",
"cwd": "${workspaceFolder}",
"name": "Run Dev Folder View",
"request": "launch",
"type": "node-terminal"
},
{
"command": "pnpm tsx --no-deprecation test/dev.ts localization",
"cwd": "${workspaceFolder}",
8  .vscode/settings.json  vendored
@@ -7,6 +7,9 @@
},
"editor.formatOnSaveMode": "file",
"eslint.rules.customizations": [
// Defaultt all ESLint errors to 'warn' to differentate from TypeScript's 'error' level
{ "rule": "*", "severity": "warn" },

// Silence some warnings that will get auto-fixed
{ "rule": "perfectionist/*", "severity": "off", "fixable": true },
{ "rule": "curly", "severity": "off", "fixable": true },
@@ -21,8 +24,5 @@
"runtimeArgs": ["--no-deprecation"]
},
// Essentially disables bun test buttons
"bun.test.filePattern": "bun.test.ts",
"playwright.env": {
"NODE_OPTIONS": "--no-deprecation --no-experimental-strip-types"
}
"bun.test.filePattern": "bun.test.ts"
}
@@ -35,9 +35,9 @@ export const MyGroupField: Field = {

| Option | Description |
| ---------------------- | ----------- |
| **`name`** | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
| **`fields`** \* | Array of field types to nest within this Group. |
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. Required when name is undefined, defaults to name converted to words. |
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. |
| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) |
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. |
| **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). |
@@ -86,7 +86,7 @@ export const ExampleCollection: CollectionConfig = {
  slug: 'example-collection',
  fields: [
    {
      name: 'pageMeta',
      name: 'pageMeta', // required
      type: 'group', // required
      interfaceName: 'Meta', // optional
      fields: [
@@ -110,38 +110,3 @@ export const ExampleCollection: CollectionConfig = {
  ],
}
```

## Presentational group fields

You can also use the Group field to create a presentational group of fields. This is useful when you want to group fields together visually without affecting the data structure.
The label will be required when a `name` is not provided.

```ts
import type { CollectionConfig } from 'payload'

export const ExampleCollection: CollectionConfig = {
  slug: 'example-collection',
  fields: [
    {
      label: 'Page meta',
      type: 'group', // required
      fields: [
        {
          name: 'title',
          type: 'text',
          required: true,
          minLength: 20,
          maxLength: 100,
        },
        {
          name: 'description',
          type: 'textarea',
          required: true,
          minLength: 40,
          maxLength: 160,
        },
      ],
    },
  ],
}
```

@@ -150,7 +150,7 @@ The `filterOptions` property can either be a `Where` query, or a function return

| `id` | The `id` of the current document being edited. Will be `undefined` during the `create` operation or when called on a `Filter` component within the list view. |
| `relationTo` | The collection `slug` to filter against, limited to this field's `relationTo` property. |
| `req` | The Payload Request, which contains references to `payload`, `user`, `locale`, and more. |
| `siblingData` | An object containing document data that is scoped to only fields within the same parent of this field. Will be an empty object when called on a `Filter` component within the list view. |
| `siblingData` | An object containing document data that is scoped to only fields within the same parent of this field. Will be an emprt object when called on a `Filter` component within the list view. |
| `user` | An object containing the currently authenticated user. |

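A minimal sketch (not part of this diff) of how these arguments might be used in a `filterOptions` function; the collection slugs and the `owner` field are placeholders:

```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  fields: [
    {
      name: 'relatedDocs',
      type: 'relationship',
      relationTo: ['pages', 'posts'],
      filterOptions: ({ relationTo, user }) => {
        // Limit the 'pages' options to documents owned by the current user;
        // allow every option for other collections.
        if (relationTo === 'pages') {
          return {
            owner: {
              equals: user?.id,
            },
          }
        }
        return true
      },
    },
  ],
}
```
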
## Example
@@ -81,7 +81,7 @@ To install a Database Adapter, you can run **one** of the following commands:

#### 2. Copy Payload files into your Next.js app folder

Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](<https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/(payload)>) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:
Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/(payload)) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:

```plaintext
app/
@@ -63,50 +63,19 @@ const config = buildConfig({
export default config
```

## Enabling Content Source Maps

Now in your Next.js app, you need to add the `encodeSourceMaps` query parameter to your API requests. This will tell Payload to include the Content Source Maps in the API response.

<Banner type="warning">
  **Note:** For performance reasons, this should only be done when in draft mode
  or on preview deployments.
</Banner>

#### REST API

If you're using the REST API, include the `?encodeSourceMaps=true` search parameter.
Now in your Next.js app, include the `?encodeSourceMaps=true` parameter in any of your API requests. For performance reasons, this should only be done when in draft mode or on preview deployments.

```ts
if (isDraftMode || process.env.VERCEL_ENV === 'preview') {
  const res = await fetch(
    `${process.env.NEXT_PUBLIC_PAYLOAD_CMS_URL}/api/pages?encodeSourceMaps=true&where[slug][equals]=${slug}`,
    `${process.env.NEXT_PUBLIC_PAYLOAD_CMS_URL}/api/pages?where[slug][equals]=${slug}&encodeSourceMaps=true`,
  )
}
```

#### Local API

If you're using the Local API, include the `encodeSourceMaps` via the `context` property.

```ts
if (isDraftMode || process.env.VERCEL_ENV === 'preview') {
  const res = await payload.find({
    collection: 'pages',
    where: {
      slug: {
        equals: slug,
      },
    },
    context: {
      encodeSourceMaps: true,
    },
  })
}
```

And that's it! You are now ready to enter Edit Mode and begin visually editing your content.

## Edit Mode
#### Edit Mode

To see Content Link on your site, you first need to visit any preview deployment on Vercel and login using the Vercel Toolbar. When Content Source Maps are detected on the page, a pencil icon will appear in the toolbar. Clicking this icon will enable Edit Mode, highlighting all editable fields on the page in blue.

@@ -125,9 +94,7 @@ const { cleaned, encoded } = vercelStegaSplit(text)

### Blocks and array fields

All `blocks` and `array` fields by definition do not have plain text strings to encode. For this reason, they are automatically given an additional `_encodedSourceMap` property, which you can use to enable Content Link on entire _sections_ of your site.

You can then specify the editing container by adding the `data-vercel-edit-target` HTML attribute to any top-level element of your block.
All `blocks` and `array` fields by definition do not have plain text strings to encode. For this reason, they are given an additional `_encodedSourceMap` property, which you can use to enable Content Link on entire _sections_ of your site. You can then specify the editing container by adding the `data-vercel-edit-target` HTML attribute to any top-level element of your block.

```ts
<div data-vercel-edit-target>
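  {/* The example above is truncated by the diff context. A possible continuation,
      sketched from the surrounding prose and not taken from the diff: render the
      block's `_encodedSourceMap` somewhere inside the edit target (hidden here)
      so the toolbar can detect it. `block.title` is a placeholder field. */}
  <span style={{ display: 'none' }}>{block._encodedSourceMap}</span>
  <h2>{block.title}</h2>
</div>
```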
@@ -309,3 +309,7 @@ import {
  ...
} from '@payloadcms/plugin-stripe/types';
```

## Examples

The [Templates Directory](https://github.com/payloadcms/payload/tree/main/templates) contains an official [E-commerce Template](https://github.com/payloadcms/payload/tree/main/templates/ecommerce) which demonstrates exactly how to configure this plugin in Payload and implement it on your front-end. You can also check out [How to Build An E-Commerce Site With Next.js](https://payloadcms.com/blog/how-to-build-an-e-commerce-site-with-nextjs) post for a bit more context around this template.
@@ -55,11 +55,10 @@ All collection `find` queries are paginated automatically. Responses are returne

All Payload APIs support the pagination controls below. With them, you can create paginated lists of documents within your application:

| Control | Default | Description |
| ------------ | ------- | ----------- |
| `limit` | `10` | Limits the number of documents returned per page - set to `0` to show all documents, we automatically disabled pagination for you when `limit` is `0` for optimisation |
| `pagination` | `true` | Set to `false` to disable pagination and return all documents |
| `page` | `1` | Get a specific page number |

| Control | Description |
| ------- | --------------------------------------- |
| `limit` | Limits the number of documents returned |
| `page` | Get a specific page number |

### Disabling pagination within Local API

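The rest of this section is cut off by the diff. As a rough sketch only (not part of the diff), disabling pagination in the Local API uses the `pagination` control described in the table above; the collection slug and the `@payload-config` import are placeholders:

```ts
import config from '@payload-config'
import { getPayload } from 'payload'

const payload = await getPayload({ config })

// Return every matching document in a single, unpaginated response.
const allPages = await payload.find({
  collection: 'pages', // placeholder collection slug
  pagination: false,
})

console.log(allPages.docs.length)
```
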
@@ -84,7 +84,6 @@ pnpm add @payloadcms/storage-s3

- The `config` object can be any [`S3ClientConfig`](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3) object (from [`@aws-sdk/client-s3`](https://github.com/aws/aws-sdk-js-v3)). _This is highly dependent on your AWS setup_. Check the AWS documentation for more information.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited with 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow CORS PUT method for the bucket to your website.
- Configure `signedDownloads` (either globally of per-collection in `collections`) to use [presigned URLs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html) for files downloading. This can improve performance for large files (like videos) while still respecting your access control.

```ts
import { s3Storage } from '@payloadcms/storage-s3'
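
// The snippet above is truncated by the diff. A minimal configuration sketch
// (an assumption, not taken from this diff) of how the options described in the
// bullets might be wired up; the 'media' collection slug and the environment
// variable names are placeholders.
import { buildConfig } from 'payload'

export default buildConfig({
  // ...the rest of your Payload config
  plugins: [
    s3Storage({
      collections: {
        media: {
          signedDownloads: true, // serve large files via presigned URLs
        },
      },
      bucket: process.env.S3_BUCKET,
      config: {
        region: process.env.S3_REGION,
        credentials: {
          accessKeyId: process.env.S3_ACCESS_KEY_ID,
          secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
        },
      },
      clientUploads: true, // upload from the browser to stay under Vercel's 4.5MB server limit
    }),
  ],
})
```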
@@ -74,13 +74,21 @@ export const rootEslintConfig = [
'no-console': 'off',
'perfectionist/sort-object-types': 'off',
'perfectionist/sort-objects': 'off',
'payload/no-relative-monorepo-imports': 'off',
},
},
]

export default [
...rootEslintConfig,
{
languageOptions: {
parserOptions: {
...rootParserOptions,
projectService: true,
tsconfigRootDir: import.meta.dirname,
},
},
},
{
files: ['packages/eslint-config/**/*.ts'],
rules: {
@@ -1,11 +1,11 @@
|
||||
import { BeforeSync, DocToSync } from '@payloadcms/plugin-search/types'
|
||||
|
||||
export const beforeSyncWithSearch: BeforeSync = async ({ req, originalDoc, searchDoc }) => {
|
||||
export const beforeSyncWithSearch: BeforeSync = async ({ originalDoc, searchDoc, payload }) => {
|
||||
const {
|
||||
doc: { relationTo: collection },
|
||||
} = searchDoc
|
||||
|
||||
const { slug, id, categories, title, meta } = originalDoc
|
||||
const { slug, id, categories, title, meta, excerpt } = originalDoc
|
||||
|
||||
const modifiedDoc: DocToSync = {
|
||||
...searchDoc,
|
||||
@@ -20,40 +20,24 @@ export const beforeSyncWithSearch: BeforeSync = async ({ req, originalDoc, searc
|
||||
}
|
||||
|
||||
if (categories && Array.isArray(categories) && categories.length > 0) {
|
||||
const populatedCategories: { id: string | number; title: string }[] = []
|
||||
for (const category of categories) {
|
||||
if (!category) {
|
||||
continue
|
||||
}
|
||||
// get full categories and keep a flattened copy of their most important properties
|
||||
try {
|
||||
const mappedCategories = categories.map((category) => {
|
||||
const { id, title } = category
|
||||
|
||||
if (typeof category === 'object') {
|
||||
populatedCategories.push(category)
|
||||
continue
|
||||
}
|
||||
|
||||
const doc = await req.payload.findByID({
|
||||
collection: 'categories',
|
||||
id: category,
|
||||
disableErrors: true,
|
||||
depth: 0,
|
||||
select: { title: true },
|
||||
req,
|
||||
return {
|
||||
relationTo: 'categories',
|
||||
id,
|
||||
title,
|
||||
}
|
||||
})
|
||||
|
||||
if (doc !== null) {
|
||||
populatedCategories.push(doc)
|
||||
} else {
|
||||
console.error(
|
||||
`Failed. Category not found when syncing collection '${collection}' with id: '${id}' to search.`,
|
||||
)
|
||||
}
|
||||
modifiedDoc.categories = mappedCategories
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`Failed. Category not found when syncing collection '${collection}' with id: '${id}' to search.`,
|
||||
)
|
||||
}
|
||||
|
||||
modifiedDoc.categories = populatedCategories.map((each) => ({
|
||||
relationTo: 'categories',
|
||||
categoryID: String(each.id),
|
||||
title: each.title,
|
||||
}))
|
||||
}
|
||||
|
||||
return modifiedDoc
|
||||
|
||||
@@ -52,7 +52,7 @@ export const searchFields: Field[] = [
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
name: 'categoryID',
|
||||
name: 'id',
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload-monorepo",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -74,9 +74,9 @@
|
||||
"docker:start": "docker compose -f test/docker-compose.yml up -d",
|
||||
"docker:stop": "docker compose -f test/docker-compose.yml down",
|
||||
"force:build": "pnpm run build:core:force",
|
||||
"lint": "turbo run lint --log-order=grouped --continue",
|
||||
"lint": "turbo run lint --concurrency 1 --continue",
|
||||
"lint-staged": "lint-staged",
|
||||
"lint:fix": "turbo run lint:fix --log-order=grouped --continue",
|
||||
"lint:fix": "turbo run lint:fix --concurrency 1 --continue",
|
||||
"obliterate-playwright-cache-macos": "rm -rf ~/Library/Caches/ms-playwright && find /System/Volumes/Data/private/var/folders -type d -name 'playwright*' -exec rm -rf {} +",
|
||||
"prepare": "husky",
|
||||
"prepare-run-test-against-prod": "pnpm bf && rm -rf test/packed && rm -rf test/node_modules && rm -rf app && rm -f test/pnpm-lock.yaml && pnpm run script:pack --all --no-build --dest test/packed && pnpm runts test/setupProd.ts && cd test && pnpm i --ignore-workspace && cd ..",
|
||||
|
||||
18  packages/admin-bar/eslint.config.js  Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/admin-bar",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"description": "An admin bar for React apps using Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
19  packages/create-payload-app/eslint.config.js  Normal file
@@ -0,0 +1,19 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
ignores: ['bin/cli.js'],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "create-payload-app",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -22,9 +22,7 @@ const updateEnvExampleVariables = (
|
||||
|
||||
const [key] = line.split('=')
|
||||
|
||||
if (!key) {
|
||||
return
|
||||
}
|
||||
if (!key) {return}
|
||||
|
||||
if (key === 'DATABASE_URI' || key === 'POSTGRES_URL' || key === 'MONGODB_URI') {
|
||||
const dbChoice = databaseType ? dbChoiceRecord[databaseType] : null
|
||||
|
||||
18  packages/db-mongodb/eslint.config.js  Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-mongodb",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"description": "The officially supported MongoDB database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -372,61 +372,36 @@ const group: FieldSchemaGenerator<GroupField> = (
|
||||
buildSchemaOptions,
|
||||
parentIsLocalized,
|
||||
): void => {
|
||||
if (fieldAffectsData(field)) {
|
||||
const formattedBaseSchema = formatBaseSchema({ buildSchemaOptions, field, parentIsLocalized })
|
||||
const formattedBaseSchema = formatBaseSchema({ buildSchemaOptions, field, parentIsLocalized })
|
||||
|
||||
// carry indexSortableFields through to versions if drafts enabled
|
||||
const indexSortableFields =
|
||||
buildSchemaOptions.indexSortableFields &&
|
||||
field.name === 'version' &&
|
||||
buildSchemaOptions.draftsEnabled
|
||||
// carry indexSortableFields through to versions if drafts enabled
|
||||
const indexSortableFields =
|
||||
buildSchemaOptions.indexSortableFields &&
|
||||
field.name === 'version' &&
|
||||
buildSchemaOptions.draftsEnabled
|
||||
|
||||
const baseSchema: SchemaTypeOptions<any> = {
|
||||
...formattedBaseSchema,
|
||||
type: buildSchema({
|
||||
buildSchemaOptions: {
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
indexSortableFields,
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
const baseSchema: SchemaTypeOptions<any> = {
|
||||
...formattedBaseSchema,
|
||||
type: buildSchema({
|
||||
buildSchemaOptions: {
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
indexSortableFields,
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
configFields: field.fields,
|
||||
parentIsLocalized: parentIsLocalized || field.localized,
|
||||
payload,
|
||||
}),
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(
|
||||
field,
|
||||
baseSchema,
|
||||
payload.config.localization,
|
||||
parentIsLocalized,
|
||||
),
|
||||
})
|
||||
} else {
|
||||
field.fields.forEach((subField) => {
|
||||
if (fieldIsVirtual(subField)) {
|
||||
return
|
||||
}
|
||||
|
||||
const addFieldSchema = getSchemaGenerator(subField.type)
|
||||
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(
|
||||
subField,
|
||||
schema,
|
||||
payload,
|
||||
buildSchemaOptions,
|
||||
(parentIsLocalized || field.localized) ?? false,
|
||||
)
|
||||
}
|
||||
})
|
||||
},
|
||||
configFields: field.fields,
|
||||
parentIsLocalized: parentIsLocalized || field.localized,
|
||||
payload,
|
||||
}),
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, payload.config.localization, parentIsLocalized),
|
||||
})
|
||||
}
|
||||
|
||||
const json: FieldSchemaGenerator<JSONField> = (
|
||||
|
||||
@@ -57,8 +57,12 @@ const relationshipSort = ({
|
||||
return false
|
||||
}
|
||||
|
||||
for (let i = 0; i < segments.length; i++) {
|
||||
const segment = segments[i]
|
||||
for (const [i, segment] of segments.entries()) {
|
||||
if (versions && i === 0 && segment === 'version') {
|
||||
segments.shift()
|
||||
continue
|
||||
}
|
||||
|
||||
const field = currentFields.find((each) => each.name === segment)
|
||||
|
||||
if (!field) {
|
||||
@@ -67,10 +71,6 @@ const relationshipSort = ({
|
||||
|
||||
if ('fields' in field) {
|
||||
currentFields = field.flattenedFields
|
||||
if (field.name === 'version' && versions && i === 0) {
|
||||
segments.shift()
|
||||
i--
|
||||
}
|
||||
} else if (
|
||||
(field.type === 'relationship' || field.type === 'upload') &&
|
||||
i !== segments.length - 1
|
||||
@@ -106,7 +106,7 @@ const relationshipSort = ({
|
||||
as: `__${path}`,
|
||||
foreignField: '_id',
|
||||
from: foreignCollection.Model.collection.name,
|
||||
localField: versions ? `version.${relationshipPath}` : relationshipPath,
|
||||
localField: relationshipPath,
|
||||
pipeline: [
|
||||
{
|
||||
$project: {
|
||||
|
||||
@@ -105,7 +105,6 @@ export const sanitizeQueryValue = ({
|
||||
| undefined => {
|
||||
let formattedValue = val
|
||||
let formattedOperator = operator
|
||||
|
||||
if (['array', 'blocks', 'group', 'tab'].includes(field.type) && path.includes('.')) {
|
||||
const segments = path.split('.')
|
||||
segments.shift()
|
||||
|
||||
@@ -151,7 +151,6 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
|
||||
query: versionQuery,
|
||||
session: paginationOptions.options?.session ?? undefined,
|
||||
sort: paginationOptions.sort as object,
|
||||
sortAggregation,
|
||||
useEstimatedCount: paginationOptions.useEstimatedCount,
|
||||
})
|
||||
} else {
|
||||
|
||||
@@ -128,6 +128,7 @@ const traverseFields = ({
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
const blocksSelect = select[field.name] as SelectType
|
||||
|
||||
|
||||
@@ -425,7 +425,6 @@ export const transform = ({
|
||||
for (const locale of config.localization.localeCodes) {
|
||||
sanitizeDate({
|
||||
field,
|
||||
locale,
|
||||
ref: fieldRef,
|
||||
value: fieldRef[locale],
|
||||
})
|
||||
|
||||
18  packages/db-postgres/eslint.config.js  Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
18  packages/db-sqlite/eslint.config.js  Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-sqlite",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"description": "The officially supported SQLite database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
18  packages/db-vercel-postgres/eslint.config.js  Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-vercel-postgres",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"description": "Vercel Postgres adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/drizzle",
|
||||
"version": "3.38.0",
|
||||
"version": "3.36.1",
|
||||
"description": "A library of shared functions used by different payload database adapters",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -53,7 +53,6 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"console-table-printer": "2.12.1",
|
||||
"dequal": "2.0.3",
|
||||
"drizzle-orm": "0.36.1",
|
||||
"prompts": "2.4.2",
|
||||
"to-snake-case": "1.0.0",
|
||||
|
||||
@@ -4,7 +4,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const count: Count = async function count(
|
||||
|
||||
@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const countGlobalVersions: CountGlobalVersions = async function countGlobalVersions(
|
||||
|
||||
@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const countVersions: CountVersions = async function countVersions(
|
||||
|
||||
@@ -23,10 +23,10 @@ export async function createGlobal<T extends Record<string, unknown>>(
|
||||
data,
|
||||
db,
|
||||
fields: globalConfig.flattenedFields,
|
||||
ignoreResult: returning === false,
|
||||
operation: 'create',
|
||||
req,
|
||||
tableName,
|
||||
ignoreResult: returning === false,
|
||||
})
|
||||
|
||||
if (returning === false) {
|
||||
|
||||
@@ -17,11 +17,11 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
||||
globalSlug,
|
||||
publishedLocale,
|
||||
req,
|
||||
returning,
|
||||
select,
|
||||
snapshot,
|
||||
updatedAt,
|
||||
versionData,
|
||||
returning,
|
||||
}: CreateGlobalVersionArgs,
|
||||
) {
|
||||
const db = await getTransaction(this, req)
|
||||
@@ -42,11 +42,11 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
||||
},
|
||||
db,
|
||||
fields: buildVersionGlobalFields(this.payload.config, global, true),
|
||||
ignoreResult: returning === false ? 'idOnly' : false,
|
||||
operation: 'create',
|
||||
req,
|
||||
select,
|
||||
tableName,
|
||||
ignoreResult: returning === false ? 'idOnly' : false,
|
||||
})
|
||||
|
||||
const table = this.tables[tableName]
|
||||
|
||||
@@ -18,11 +18,11 @@ export async function createVersion<T extends TypeWithID>(
|
||||
parent,
|
||||
publishedLocale,
|
||||
req,
|
||||
returning,
|
||||
select,
|
||||
snapshot,
|
||||
updatedAt,
|
||||
versionData,
|
||||
returning,
|
||||
}: CreateVersionArgs<T>,
|
||||
) {
|
||||
const db = await getTransaction(this, req)
|
||||
|
||||
@@ -6,7 +6,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { selectDistinct } from './queries/selectDistinct.js'
|
||||
import { transform } from './transform/read/index.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
@@ -4,10 +4,9 @@ import { inArray } from 'drizzle-orm'
|
||||
|
||||
import type { DrizzleAdapter } from '../types.js'
|
||||
|
||||
import { buildQuery } from '../queries/buildQuery.js'
|
||||
import buildQuery from '../queries/buildQuery.js'
|
||||
import { selectDistinct } from '../queries/selectDistinct.js'
|
||||
import { transform } from '../transform/read/index.js'
|
||||
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
|
||||
import { getTransaction } from '../utilities/getTransaction.js'
|
||||
import { buildFindManyArgs } from './buildFindManyArgs.js'
|
||||
|
||||
@@ -76,26 +75,6 @@ export const findMany = async function find({
|
||||
tableName,
|
||||
versions,
|
||||
})
|
||||
|
||||
if (orderBy) {
|
||||
for (const key in selectFields) {
|
||||
const column = selectFields[key]
|
||||
if (column.primary) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (
|
||||
!orderBy.some(
|
||||
(col) =>
|
||||
col.column.name === column.name &&
|
||||
getNameFromDrizzleTable(col.column.table) === getNameFromDrizzleTable(column.table),
|
||||
)
|
||||
) {
|
||||
delete selectFields[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const selectDistinctResult = await selectDistinct({
|
||||
adapter,
|
||||
db,
|
||||
|
||||
@@ -19,17 +19,12 @@ import toSnakeCase from 'to-snake-case'
|
||||
import type { BuildQueryJoinAliases, DrizzleAdapter } from '../types.js'
|
||||
import type { Result } from './buildFindManyArgs.js'
|
||||
|
||||
import { buildQuery } from '../queries/buildQuery.js'
|
||||
import buildQuery from '../queries/buildQuery.js'
|
||||
import { getTableAlias } from '../queries/getTableAlias.js'
|
||||
import { operatorMap } from '../queries/operatorMap.js'
|
||||
import { getArrayRelationName } from '../utilities/getArrayRelationName.js'
|
||||
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
|
||||
import { jsonAggBuildObject } from '../utilities/json.js'
|
||||
import { rawConstraint } from '../utilities/rawConstraint.js'
|
||||
import {
|
||||
InternalBlockTableNameIndex,
|
||||
resolveBlockTableName,
|
||||
} from '../utilities/validateExistingBlockIsIdentical.js'
|
||||
|
||||
const flattenAllWherePaths = (where: Where, paths: string[]) => {
|
||||
for (const k in where) {
|
||||
@@ -201,12 +196,7 @@ export const traverseFields = ({
|
||||
}
|
||||
}
|
||||
|
||||
const relationName = getArrayRelationName({
|
||||
field,
|
||||
path: `${path}${field.name}`,
|
||||
tableName: arrayTableName,
|
||||
})
|
||||
|
||||
const relationName = field.dbName ? `_${arrayTableName}` : `${path}${field.name}`
|
||||
currentArgs.with[relationName] = withArray
|
||||
|
||||
traverseFields({
|
||||
@@ -254,7 +244,7 @@ export const traverseFields = ({
|
||||
|
||||
;(field.blockReferences ?? field.blocks).forEach((_block) => {
|
||||
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
||||
const blockKey = `_blocks_${block.slug}${!block[InternalBlockTableNameIndex] ? '' : `_${block[InternalBlockTableNameIndex]}`}`
|
||||
const blockKey = `_blocks_${block.slug}`
|
||||
|
||||
let blockSelect: boolean | SelectType | undefined
|
||||
|
||||
@@ -294,9 +284,8 @@ export const traverseFields = ({
|
||||
with: {},
|
||||
}
|
||||
|
||||
const tableName = resolveBlockTableName(
|
||||
block,
|
||||
adapter.tableNameMap.get(`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||
const tableName = adapter.tableNameMap.get(
|
||||
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
|
||||
)
|
||||
|
||||
if (typeof blockSelect === 'object') {
|
||||
|
||||
@@ -23,7 +23,7 @@ export { migrateFresh } from './migrateFresh.js'
|
||||
export { migrateRefresh } from './migrateRefresh.js'
|
||||
export { migrateReset } from './migrateReset.js'
|
||||
export { migrateStatus } from './migrateStatus.js'
|
||||
export { buildQuery } from './queries/buildQuery.js'
|
||||
export { default as buildQuery } from './queries/buildQuery.js'
|
||||
export { operatorMap } from './queries/operatorMap.js'
|
||||
export type { Operators } from './queries/operatorMap.js'
|
||||
export { parseParams } from './queries/parseParams.js'
|
||||
|
||||
@@ -42,36 +42,33 @@ export const migrate: DrizzleAdapter['migrate'] = async function migrate(
|
||||
limit: 0,
|
||||
sort: '-name',
|
||||
}))
|
||||
|
||||
if (migrationsInDB.find((m) => m.batch === -1)) {
|
||||
const { confirm: runMigrations } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message:
|
||||
"It looks like you've run Payload in dev mode, meaning you've dynamically pushed changes to your database.\n\n" +
|
||||
"If you'd like to run migrations, data loss will occur. Would you like to proceed?",
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!runMigrations) {
|
||||
process.exit(0)
|
||||
}
|
||||
// ignore the dev migration so that the latest batch number increments correctly
|
||||
migrationsInDB = migrationsInDB.filter((m) => m.batch !== -1)
|
||||
}
|
||||
|
||||
if (Number(migrationsInDB?.[0]?.batch) > 0) {
|
||||
latestBatch = Number(migrationsInDB[0]?.batch)
|
||||
}
|
||||
}
|
||||
|
||||
if (migrationsInDB.find((m) => m.batch === -1)) {
|
||||
const { confirm: runMigrations } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message:
|
||||
"It looks like you've run Payload in dev mode, meaning you've dynamically pushed changes to your database.\n\n" +
|
||||
"If you'd like to run migrations, data loss will occur. Would you like to proceed?",
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!runMigrations) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
const newBatch = latestBatch + 1
|
||||
|
||||
// Execute 'up' function for each migration sequentially
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { FlattenedField } from 'payload'
|
||||
import type { FlattenedBlock, FlattenedField } from 'payload'
|
||||
|
||||
type Args = {
|
||||
doc: Record<string, unknown>
|
||||
@@ -54,7 +54,7 @@ export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
||||
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
||||
const matchedBlock = field.blocks.find(
|
||||
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
||||
)
|
||||
) as FlattenedBlock | undefined
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
@@ -75,7 +75,7 @@ export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
||||
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
||||
const matchedBlock = field.blocks.find(
|
||||
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
||||
)
|
||||
) as FlattenedBlock | undefined
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
|
||||
@@ -1,126 +1,49 @@
|
||||
export type Groups =
|
||||
| 'addColumn'
|
||||
| 'addConstraint'
|
||||
| 'alterType'
|
||||
| 'createIndex'
|
||||
| 'createTable'
|
||||
| 'createType'
|
||||
| 'disableRowSecurity'
|
||||
| 'dropColumn'
|
||||
| 'dropConstraint'
|
||||
| 'dropIndex'
|
||||
| 'dropTable'
|
||||
| 'dropType'
|
||||
| 'notNull'
|
||||
| 'renameColumn'
|
||||
| 'setDefault'
|
||||
|
||||
/**
|
||||
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement.
|
||||
* Works with or without a schema name.
|
||||
*
|
||||
* Examples:
|
||||
* 'ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'
|
||||
* => 'ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;'
|
||||
*
|
||||
* 'ALTER TABLE "public"."pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'
|
||||
* => 'ALTER TABLE "public"."pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;'
|
||||
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
|
||||
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
|
||||
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
* @param sql
|
||||
*/
|
||||
function convertAddColumnToAlterColumn(sql) {
|
||||
// Regular expression to match the ADD COLUMN statement with its constraints
|
||||
const regex = /ALTER TABLE ((?:"[^"]+"\.)?"[^"]+") ADD COLUMN ("[^"]+") [^;]*?NOT NULL;/i
|
||||
const regex = /ALTER TABLE ("[^"]+")\.(".*?") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
|
||||
|
||||
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
|
||||
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
|
||||
return sql.replace(regex, 'ALTER TABLE $1.$2 ALTER COLUMN $3 SET NOT NULL;')
|
||||
}
|
||||
|
||||
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
|
||||
const groups = {
|
||||
/**
|
||||
* example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
||||
*/
|
||||
addColumn: 'ADD COLUMN',
|
||||
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
||||
|
||||
/**
|
||||
* example:
|
||||
* DO $$ BEGIN
|
||||
* ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
* EXCEPTION
|
||||
* WHEN duplicate_object THEN null;
|
||||
* END $$;
|
||||
*/
|
||||
addConstraint: 'ADD CONSTRAINT',
|
||||
//example:
|
||||
// DO $$ BEGIN
|
||||
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
// EXCEPTION
|
||||
// WHEN duplicate_object THEN null;
|
||||
// END $$;
|
||||
|
||||
/**
|
||||
* example: CREATE TABLE IF NOT EXISTS "payload_locked_documents" (
|
||||
* "id" serial PRIMARY KEY NOT NULL,
|
||||
* "global_slug" varchar,
|
||||
* "updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
* "created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
* );
|
||||
*/
|
||||
createTable: 'CREATE TABLE',
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
||||
*/
|
||||
dropColumn: 'DROP COLUMN',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
||||
*/
|
||||
dropConstraint: 'DROP CONSTRAINT',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
||||
|
||||
/**
|
||||
* example: DROP TABLE "pages_rels";
|
||||
*/
|
||||
dropTable: 'DROP TABLE',
|
||||
// example: DROP TABLE "pages_rels";
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
*/
|
||||
notNull: 'NOT NULL',
|
||||
|
||||
/**
|
||||
* example: CREATE TYPE "public"."enum__pages_v_published_locale" AS ENUM('en', 'es');
|
||||
*/
|
||||
createType: 'CREATE TYPE',
|
||||
|
||||
/**
|
||||
* example: ALTER TYPE "public"."enum_pages_blocks_cta" ADD VALUE 'copy';
|
||||
*/
|
||||
alterType: 'ALTER TYPE',
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "categories_rels" DISABLE ROW LEVEL SECURITY;
|
||||
*/
|
||||
disableRowSecurity: 'DISABLE ROW LEVEL SECURITY;',
|
||||
|
||||
/**
|
||||
* example: DROP INDEX IF EXISTS "pages_title_idx";
|
||||
*/
|
||||
dropIndex: 'DROP INDEX IF EXISTS',
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "pages" ALTER COLUMN "_status" SET DEFAULT 'draft';
|
||||
*/
|
||||
setDefault: 'SET DEFAULT',
|
||||
|
||||
/**
|
||||
* example: CREATE INDEX IF NOT EXISTS "payload_locked_documents_global_slug_idx" ON "payload_locked_documents" USING btree ("global_slug");
|
||||
*/
|
||||
createIndex: 'INDEX IF NOT EXISTS',
|
||||
|
||||
/**
|
||||
* example: DROP TYPE "public"."enum__pages_v_published_locale";
|
||||
*/
|
||||
dropType: 'DROP TYPE',
|
||||
|
||||
/**
|
||||
* columns were renamed from camelCase to snake_case
|
||||
* example: ALTER TABLE "forms" RENAME COLUMN "confirmationType" TO "confirmation_type";
|
||||
*/
|
||||
renameColumn: 'RENAME COLUMN',
|
||||
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
}
|
||||
|
||||
const result = Object.keys(groups).reduce((result, group: Groups) => {
|
||||
@@ -128,17 +51,7 @@ export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> =
|
||||
return result
|
||||
}, {}) as Record<Groups, string[]>
|
||||
|
||||
// push multi-line changes to a single grouping
|
||||
let isCreateTable = false
|
||||
|
||||
for (const line of list) {
|
||||
if (isCreateTable) {
|
||||
result.createTable.push(line)
|
||||
if (line.includes(');')) {
|
||||
isCreateTable = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
Object.entries(groups).some(([key, value]) => {
|
||||
if (line.endsWith('NOT NULL;')) {
|
||||
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
|
||||
@@ -151,11 +64,7 @@ export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> =
|
||||
return true
|
||||
}
|
||||
if (line.includes(value)) {
|
||||
let statement = line
|
||||
if (key === 'dropConstraint') {
|
||||
statement = line.replace('" DROP CONSTRAINT "', '" DROP CONSTRAINT IF EXISTS "')
|
||||
}
|
||||
result[key].push(statement)
|
||||
result[key].push(line)
|
||||
return true
|
||||
}
|
||||
})
|
||||
|
||||
@@ -20,17 +20,6 @@ type Args = {
|
||||
req?: Partial<PayloadRequest>
|
||||
}
|
||||
|
||||
const runStatementGroup = async ({ adapter, db, debug, statements }) => {
|
||||
const addColumnsStatement = statements.join('\n')
|
||||
|
||||
if (debug) {
|
||||
adapter.payload.logger.info(debug)
|
||||
adapter.payload.logger.info(addColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addColumnsStatement))
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves upload and relationship columns from the join table and into the tables while moving data
|
||||
* This is done in the following order:
|
||||
@@ -47,11 +36,21 @@ const runStatementGroup = async ({ adapter, db, debug, statements }) => {
|
||||
*/
|
||||
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
const adapter = payload.db as unknown as BasePostgresAdapter
|
||||
const db = await getTransaction(adapter, req)
|
||||
const dir = payload.db.migrationDir
|
||||
|
||||
// get the drizzle migrateUpSQL from drizzle using the last schema
|
||||
const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()
|
||||
const drizzleJsonAfter = generateDrizzleJson(adapter.schema) as DrizzleSnapshotJSON
|
||||
|
||||
const toSnapshot: Record<string, unknown> = {}
|
||||
|
||||
for (const key of Object.keys(adapter.schema).filter(
|
||||
(key) => !key.startsWith('payload_locked_documents'),
|
||||
)) {
|
||||
toSnapshot[key] = adapter.schema[key]
|
||||
}
|
||||
|
||||
const drizzleJsonAfter = generateDrizzleJson(toSnapshot) as DrizzleSnapshotJSON
|
||||
|
||||
// Get the previous migration snapshot
|
||||
const previousSnapshot = fs
|
||||
@@ -83,62 +82,16 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
|
||||
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
|
||||
|
||||
const db = await getTransaction(adapter, req)
|
||||
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING TYPES' : null,
|
||||
statements: sqlUpStatements.createType,
|
||||
})
|
||||
if (debug) {
|
||||
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
|
||||
payload.logger.info(addColumnsStatement)
|
||||
}
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'ALTERING TYPES' : null,
|
||||
statements: sqlUpStatements.alterType,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING TABLES' : null,
|
||||
statements: sqlUpStatements.createTable,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'RENAMING COLUMNS' : null,
|
||||
statements: sqlUpStatements.renameColumn,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING NEW RELATIONSHIP COLUMNS' : null,
|
||||
statements: sqlUpStatements.addColumn,
|
||||
})
|
||||
|
||||
// SET DEFAULTS
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'SETTING DEFAULTS' : null,
|
||||
statements: sqlUpStatements.setDefault,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING INDEXES' : null,
|
||||
statements: sqlUpStatements.createIndex,
|
||||
})
|
||||
await db.execute(sql.raw(addColumnsStatement))
|
||||
|
||||
for (const collection of payload.config.collections) {
|
||||
if (collection.slug === 'payload-locked-documents') {
|
||||
continue
|
||||
}
|
||||
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
|
||||
const pathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
@@ -284,58 +237,52 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
}
|
||||
|
||||
// ADD CONSTRAINT
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'ADDING CONSTRAINTS' : null,
|
||||
statements: sqlUpStatements.addConstraint,
|
||||
})
|
||||
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('ADDING CONSTRAINTS')
|
||||
payload.logger.info(addConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addConstraintsStatement))
|
||||
|
||||
// NOT NULL
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'NOT NULL CONSTRAINTS' : null,
|
||||
statements: sqlUpStatements.notNull,
|
||||
})
|
||||
const notNullStatements = sqlUpStatements.notNull.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('NOT NULL CONSTRAINTS')
|
||||
payload.logger.info(notNullStatements)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(notNullStatements))
|
||||
|
||||
// DROP TABLE
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING TABLES' : null,
|
||||
statements: sqlUpStatements.dropTable,
|
||||
})
|
||||
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
|
||||
|
||||
// DROP INDEX
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING INDEXES' : null,
|
||||
statements: sqlUpStatements.dropIndex,
|
||||
})
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING TABLES')
|
||||
payload.logger.info(dropTablesStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropTablesStatement))
|
||||
|
||||
// DROP CONSTRAINT
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING CONSTRAINTS' : null,
|
||||
statements: sqlUpStatements.dropConstraint,
|
||||
})
|
||||
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING CONSTRAINTS')
|
||||
payload.logger.info(dropConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropConstraintsStatement))
|
||||
|
||||
// DROP COLUMN
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING COLUMNS' : null,
|
||||
statements: sqlUpStatements.dropColumn,
|
||||
})
|
||||
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
|
||||
|
||||
// DROP TYPES
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING TYPES' : null,
|
||||
statements: sqlUpStatements.dropType,
|
||||
})
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING COLUMNS')
|
||||
payload.logger.info(dropColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropColumnsStatement))
|
||||
}
|
||||
|
||||
@@ -56,7 +56,7 @@ export const migrateRelationships = async ({
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
`

paginationResult = await db.execute(sql.raw(`${paginationStatement}`))
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))

if (paginationResult.rows.length === 0) {
return
@@ -72,7 +72,7 @@ export const migrateRelationships = async ({
payload.logger.info(statement)
}

const result = await db.execute(sql.raw(`${statement}`))
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))

const docsToResave: DocsToResave = {}

@@ -1,4 +1,4 @@
import type { SQL, Table } from 'drizzle-orm'
import type { Table } from 'drizzle-orm'
import type { FlattenedField, Sort } from 'payload'

import { asc, desc } from 'drizzle-orm'
@@ -16,7 +16,6 @@ type Args = {
joins: BuildQueryJoinAliases
locale?: string
parentIsLocalized: boolean
rawSort?: SQL
selectFields: Record<string, GenericColumn>
sort?: Sort
tableName: string
@@ -32,7 +31,6 @@ export const buildOrderBy = ({
joins,
locale,
parentIsLocalized,
rawSort,
selectFields,
sort,
tableName,
@@ -76,23 +74,17 @@ export const buildOrderBy = ({
value: sortProperty,
})
if (sortTable?.[sortTableColumnName]) {
let order = sortDirection === 'asc' ? asc : desc

if (rawSort) {
order = () => rawSort
}

orderBy.push({
column:
aliasTable && tableName === getNameFromDrizzleTable(sortTable)
? aliasTable[sortTableColumnName]
: sortTable[sortTableColumnName],
order,
order: sortDirection === 'asc' ? asc : desc,
})

selectFields[sortTableColumnName] = sortTable[sortTableColumnName]
}
} catch (_) {
} catch (err) {
// continue
}
}

@@ -37,8 +37,7 @@ export type BuildQueryResult = {
selectFields: Record<string, GenericColumn>
where: SQL
}

export const buildQuery = function buildQuery({
const buildQuery = function buildQuery({
adapter,
aliasTable,
fields,
@@ -80,7 +79,6 @@ export const buildQuery = function buildQuery({
joins,
locale,
parentIsLocalized,
rawSort: context.rawSort,
selectFields,
sort: context.sort,
tableName,
@@ -93,3 +91,5 @@ export const buildQuery = function buildQuery({
where,
}
}

export default buildQuery

@@ -19,7 +19,6 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js'
import type { BuildQueryJoinAliases } from './buildQuery.js'

import { isPolymorphicRelationship } from '../utilities/isPolymorphicRelationship.js'
import { resolveBlockTableName } from '../utilities/validateExistingBlockIsIdentical.js'
import { addJoinTable } from './addJoinTable.js'
import { getTableAlias } from './getTableAlias.js'

@@ -194,9 +193,8 @@ export const getTableColumnFromPath = ({
(block) => typeof block !== 'string' && block.slug === blockType,
) as FlattenedBlock | undefined)

newTableName = resolveBlockTableName(
block,
adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`),
newTableName = adapter.tableNameMap.get(
`${tableName}_blocks_${toSnakeCase(block.slug)}`,
)

const { newAliasTable } = getTableAlias({ adapter, tableName: newTableName })
@@ -222,11 +220,7 @@ export const getTableColumnFromPath = ({
const hasBlockField = (field.blockReferences ?? field.blocks).some((_block) => {
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block

newTableName = resolveBlockTableName(
block,
adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`),
)

newTableName = adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`)
constraintPath = `${constraintPath}${field.name}.%.`

let result: TableColumn
@@ -280,7 +274,7 @@ export const getTableColumnFromPath = ({
tableName: newTableName,
value,
})
} catch (_) {
} catch (error) {
// this is fine, not every block will have the field
}
if (!result) {
@@ -15,6 +15,7 @@ import {
notInArray,
or,
type SQL,
type SQLWrapper,
} from 'drizzle-orm'

type OperatorKeys =
@@ -34,7 +35,7 @@ type OperatorKeys =
| 'not_like'
| 'or'

export type Operators = Record<OperatorKeys, (column: Column, value: unknown) => SQL>
export type Operators = Record<OperatorKeys, (column: Column, value: SQLWrapper | unknown) => SQL>

export const operatorMap: Operators = {
and,

@@ -14,7 +14,7 @@ import { buildAndOrConditions } from './buildAndOrConditions.js'
import { getTableColumnFromPath } from './getTableColumnFromPath.js'
import { sanitizeQueryValue } from './sanitizeQueryValue.js'

export type QueryContext = { rawSort?: SQL; sort: Sort }
export type QueryContext = { sort: Sort }

type Args = {
adapter: DrizzleAdapter
@@ -348,7 +348,6 @@ export function parseParams({
}
if (geoConstraints.length) {
context.sort = relationOrPath
context.rawSort = sql`${table[columnName]} <-> ST_SetSRID(ST_MakePoint(${lng}, ${lat}), 4326)`
constraints.push(and(...geoConstraints))
}
break

@@ -1,4 +1,5 @@
import type { QueryPromise, SQL } from 'drizzle-orm'
import type { PgSelect } from 'drizzle-orm/pg-core'
import type { SQLiteColumn, SQLiteSelect } from 'drizzle-orm/sqlite-core'

import type {

@@ -32,7 +32,6 @@ type Args = {
* ie. indexes, multiple columns, etc
*/
baseIndexes?: Record<string, RawIndex>
blocksTableNameMap: Record<string, number>
buildNumbers?: boolean
buildRelationships?: boolean
compoundIndexes?: SanitizedCompoundIndex[]
@@ -71,7 +70,6 @@ export const buildTable = ({
baseColumns = {},
baseForeignKeys = {},
baseIndexes = {},
blocksTableNameMap,
compoundIndexes,
disableNotNull,
disableRelsTableUnique = false,
@@ -122,7 +120,6 @@ export const buildTable = ({
hasManyTextField,
} = traverseFields({
adapter,
blocksTableNameMap,
columns,
disableNotNull,
disableRelsTableUnique,

@@ -56,7 +56,6 @@ export const buildRawSchema = ({

buildTable({
adapter,
blocksTableNameMap: {},
compoundIndexes: collection.sanitizedIndexes,
disableNotNull: !!collection?.versions?.drafts,
disableUnique: false,
@@ -76,7 +75,6 @@ export const buildRawSchema = ({

buildTable({
adapter,
blocksTableNameMap: {},
compoundIndexes: buildVersionCompoundIndexes({ indexes: collection.sanitizedIndexes }),
disableNotNull: !!collection.versions?.drafts,
disableUnique: true,
@@ -98,7 +96,6 @@ export const buildRawSchema = ({

buildTable({
adapter,
blocksTableNameMap: {},
disableNotNull: !!global?.versions?.drafts,
disableUnique: false,
fields: global.flattenedFields,
@@ -121,7 +118,6 @@ export const buildRawSchema = ({

buildTable({
adapter,
blocksTableNameMap: {},
disableNotNull: !!global.versions?.drafts,
disableUnique: true,
fields: versionFields,
@@ -1,7 +1,8 @@
import type { FlattenedField } from 'payload'
import type { CompoundIndex, FlattenedField } from 'payload'

import { InvalidConfiguration } from 'payload'
import {
array,
fieldAffectsData,
fieldIsVirtual,
fieldShouldBeLocalized,
@@ -22,20 +23,14 @@ import type {

import { createTableName } from '../createTableName.js'
import { buildIndexName } from '../utilities/buildIndexName.js'
import { getArrayRelationName } from '../utilities/getArrayRelationName.js'
import { hasLocalesTable } from '../utilities/hasLocalesTable.js'
import {
InternalBlockTableNameIndex,
setInternalBlockIndex,
validateExistingBlockIsIdentical,
} from '../utilities/validateExistingBlockIsIdentical.js'
import { validateExistingBlockIsIdentical } from '../utilities/validateExistingBlockIsIdentical.js'
import { buildTable } from './build.js'
import { idToUUID } from './idToUUID.js'
import { withDefault } from './withDefault.js'

type Args = {
adapter: DrizzleAdapter
blocksTableNameMap: Record<string, number>
columnPrefix?: string
columns: Record<string, RawColumn>
disableNotNull: boolean
@@ -76,7 +71,6 @@ type Result = {

export const traverseFields = ({
adapter,
blocksTableNameMap,
columnPrefix,
columns,
disableNotNull,
@@ -255,7 +249,6 @@ export const traverseFields = ({
baseColumns,
baseForeignKeys,
baseIndexes,
blocksTableNameMap,
disableNotNull: disableNotNullFromHere,
disableRelsTableUnique: true,
disableUnique,
@@ -295,11 +288,7 @@ export const traverseFields = ({
}
}

const relationName = getArrayRelationName({
field,
path: fieldName,
tableName: arrayTableName,
})
const relationName = field.dbName ? `_${arrayTableName}` : fieldName

relationsToBuild.set(relationName, {
type: 'many',
@@ -375,7 +364,7 @@ export const traverseFields = ({
;(field.blockReferences ?? field.blocks).forEach((_block) => {
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block

let blockTableName = createTableName({
const blockTableName = createTableName({
adapter,
config: block,
parentTableName: rootTableName,
@@ -383,27 +372,6 @@ export const traverseFields = ({
throwValidationError,
versionsCustomName: versions,
})

if (typeof blocksTableNameMap[blockTableName] === 'undefined') {
blocksTableNameMap[blockTableName] = 1
} else if (
!validateExistingBlockIsIdentical({
block,
localized: field.localized,
rootTableName,
table: adapter.rawTables[blockTableName],
tableLocales: adapter.rawTables[`${blockTableName}${adapter.localesSuffix}`],
})
) {
blocksTableNameMap[blockTableName]++
setInternalBlockIndex(block, blocksTableNameMap[blockTableName])
blockTableName = `${blockTableName}_${blocksTableNameMap[blockTableName]}`
}
let relationName = `_blocks_${block.slug}`
if (typeof block[InternalBlockTableNameIndex] !== 'undefined') {
relationName = `_blocks_${block.slug}_${block[InternalBlockTableNameIndex]}`
}

if (!adapter.rawTables[blockTableName]) {
const baseColumns: Record<string, RawColumn> = {
_order: {
@@ -483,7 +451,6 @@ export const traverseFields = ({
baseColumns,
baseForeignKeys,
baseIndexes,
blocksTableNameMap,
disableNotNull: disableNotNullFromHere,
disableRelsTableUnique: true,
disableUnique,
@@ -534,7 +501,7 @@ export const traverseFields = ({
},
],
references: ['id'],
relationName,
relationName: `_blocks_${block.slug}`,
to: rootTableName,
},
}
@@ -582,10 +549,18 @@ export const traverseFields = ({
})

adapter.rawRelations[blockTableName] = blockRelations
} else if (process.env.NODE_ENV !== 'production' && !versions) {
validateExistingBlockIsIdentical({
block,
localized: field.localized,
parentIsLocalized: parentIsLocalized || field.localized,
rootTableName,
table: adapter.rawTables[blockTableName],
tableLocales: adapter.rawTables[`${blockTableName}${adapter.localesSuffix}`],
})
}

// blocks relationships are defined from the collection or globals table down to the block, bypassing any subBlocks
rootRelationsToBuild.set(relationName, {
rootRelationsToBuild.set(`_blocks_${block.slug}`, {
type: 'many',
// blocks are not localized on the parent table
localized: false,
@@ -649,7 +624,6 @@ export const traverseFields = ({
hasManyTextField: groupHasManyTextField,
} = traverseFields({
adapter,
blocksTableNameMap,
columnPrefix: `${columnName}_`,
columns,
disableNotNull: disableNotNullFromHere,
@@ -866,7 +840,6 @@ export const traverseFields = ({
baseColumns,
baseForeignKeys,
baseIndexes,
blocksTableNameMap,
disableNotNull,
disableUnique,
fields: [],
@@ -12,7 +12,7 @@ export const commitTransaction: CommitTransaction = async function commitTransac

try {
await this.sessions[id].resolve()
} catch (_) {
} catch (err: unknown) {
await this.sessions[id].reject()
}

@@ -49,7 +49,6 @@ export const transform = <T extends Record<string, unknown> | TypeWithID>({
}

const blocks = createBlocksMap(data)

const deletions = []

const result = traverseFields<T>({

@@ -6,8 +6,6 @@ import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from '../../types.js'
import type { BlocksMap } from '../../utilities/createBlocksMap.js'

import { getArrayRelationName } from '../../utilities/getArrayRelationName.js'
import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
import { transformHasManyNumber } from './hasManyNumber.js'
import { transformHasManyText } from './hasManyText.js'
import { transformRelationship } from './relationship.js'
@@ -123,7 +121,9 @@ export const traverseFields = <T extends Record<string, unknown>>({
`${currentTableName}_${tablePath}${toSnakeCase(field.name)}`,
)

fieldData = table[getArrayRelationName({ field, path: fieldName, tableName: arrayTableName })]
if (field.dbName) {
fieldData = table[`_${arrayTableName}`]
}

if (Array.isArray(fieldData)) {
if (isLocalized) {
@@ -249,9 +249,8 @@ export const traverseFields = <T extends Record<string, unknown>>({
(block) => typeof block !== 'string' && block.slug === row.blockType,
) as FlattenedBlock | undefined)

const tableName = resolveBlockTableName(
block,
adapter.tableNameMap.get(`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`),
const tableName = adapter.tableNameMap.get(
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
)

if (block) {
@@ -329,11 +328,8 @@ export const traverseFields = <T extends Record<string, unknown>>({
delete row._index
}

const tableName = resolveBlockTableName(
block,
adapter.tableNameMap.get(
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
),
const tableName = adapter.tableNameMap.get(
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
)

acc.push(
@@ -670,6 +666,10 @@ export const traverseFields = <T extends Record<string, unknown>>({
withinArrayOrBlockLocale: locale || withinArrayOrBlockLocale,
})

if ('_order' in ref) {
delete ref._order
}

return
}

@@ -6,7 +6,6 @@ import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from '../../types.js'
import type { BlockRowToInsert, RelationshipToDelete } from './types.js'

import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
import { traverseFields } from './traverseFields.js'

type Args = {
@@ -67,6 +66,10 @@ export const transformBlocks = ({
}
const blockType = toSnakeCase(blockRow.blockType)

if (!blocks[blockType]) {
blocks[blockType] = []
}

const newRow: BlockRowToInsert = {
arrays: {},
locales: {},
@@ -83,14 +86,7 @@ export const transformBlocks = ({
newRow.row._locale = withinArrayOrBlockLocale
}

const blockTableName = resolveBlockTableName(
matchedBlock,
adapter.tableNameMap.get(`${baseTableName}_blocks_${blockType}`),
)

if (!blocks[blockTableName]) {
blocks[blockTableName] = []
}
const blockTableName = adapter.tableNameMap.get(`${baseTableName}_blocks_${blockType}`)

const hasUUID = adapter.tables[blockTableName]._uuid

@@ -128,6 +124,6 @@ export const transformBlocks = ({
withinArrayOrBlockLocale,
})

blocks[blockTableName].push(newRow)
blocks[blockType].push(newRow)
})
}

@@ -22,7 +22,7 @@ export const transformRelationship = ({ baseRow, data, field, relationships }: A
if (Array.isArray(field.relationTo) && valueIsValueWithRelation(relation)) {
relationRow[`${relation.relationTo}ID`] = relation.value
relationships.push(relationRow)
} else if (typeof field.relationTo === 'string') {
} else {
relationRow[`${field.relationTo}ID`] = relation
if (relation) {
relationships.push(relationRow)
@@ -8,7 +8,6 @@ import type { DrizzleAdapter } from '../../types.js'
import type { ArrayRowToInsert, BlockRowToInsert, RelationshipToDelete } from './types.js'

import { isArrayOfRows } from '../../utilities/isArrayOfRows.js'
import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
import { transformArray } from './array.js'
import { transformBlocks } from './blocks.js'
import { transformNumbers } from './numbers.js'
@@ -176,17 +175,7 @@ export const traverseFields = ({

if (field.type === 'blocks') {
;(field.blockReferences ?? field.blocks).forEach((block) => {
const matchedBlock =
typeof block === 'string'
? adapter.payload.config.blocks.find((each) => each.slug === block)
: block

blocksToDelete.add(
resolveBlockTableName(
matchedBlock,
adapter.tableNameMap.get(`${baseTableName}_blocks_${toSnakeCase(matchedBlock.slug)}`),
),
)
blocksToDelete.add(toSnakeCase(typeof block === 'string' ? block : block.slug))
})

if (isLocalized) {

@@ -28,7 +28,7 @@ export type RowToInsert = {
[tableName: string]: ArrayRowToInsert[]
}
blocks: {
[tableName: string]: BlockRowToInsert[]
[blockType: string]: BlockRowToInsert[]
}
blocksToDelete: Set<string>
locales: {

@@ -9,7 +9,7 @@ import { getTransaction } from './utilities/getTransaction.js'

export async function updateGlobal<T extends Record<string, unknown>>(
this: DrizzleAdapter,
{ slug, data, req, returning, select }: UpdateGlobalArgs,
{ slug, data, req, select, returning }: UpdateGlobalArgs,
): Promise<T> {
const db = await getTransaction(this, req)
const globalConfig = this.payload.globals.config.find((config) => config.slug === slug)
@@ -23,10 +23,10 @@ export async function updateGlobal<T extends Record<string, unknown>>(
data,
db,
fields: globalConfig.flattenedFields,
ignoreResult: returning === false,
req,
select,
tableName,
ignoreResult: returning === false,
})

if (returning === false) {

@@ -10,7 +10,7 @@ import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter } from './types.js'

import { buildQuery } from './queries/buildQuery.js'
import buildQuery from './queries/buildQuery.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'

@@ -21,10 +21,10 @@ export async function updateGlobalVersion<T extends TypeWithID>(
global,
locale,
req,
returning,
select,
versionData,
where: whereArg,
returning,
}: UpdateGlobalVersionArgs<T>,
) {
const db = await getTransaction(this, req)
@@ -53,12 +53,12 @@ export async function updateGlobalVersion<T extends TypeWithID>(
data: versionData,
db,
fields,
ignoreResult: returning === false,
operation: 'update',
req,
select,
tableName,
where,
ignoreResult: returning === false,
})

if (returning === false) {

@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter } from './types.js'

import { buildQuery } from './queries/buildQuery.js'
import buildQuery from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'

@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter } from './types.js'

import { buildQuery } from './queries/buildQuery.js'
import buildQuery from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'

@@ -10,7 +10,7 @@ import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter } from './types.js'

import { buildQuery } from './queries/buildQuery.js'
import buildQuery from './queries/buildQuery.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'

@@ -21,10 +21,10 @@ export async function updateVersion<T extends TypeWithID>(
collection,
locale,
req,
returning,
select,
versionData,
where: whereArg,
returning,
}: UpdateVersionArgs<T>,
) {
const db = await getTransaction(this, req)
@@ -50,13 +50,13 @@ export async function updateVersion<T extends TypeWithID>(
data: versionData,
db,
fields,
ignoreResult: returning === false,
joinQuery: false,
operation: 'update',
req,
select,
tableName,
where,
ignoreResult: returning === false,
})

if (returning === false) {
@@ -134,16 +134,16 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(

// If there are blocks, add parent to each, and then
// store by table name and rows
Object.keys(rowToInsert.blocks).forEach((tableName) => {
rowToInsert.blocks[tableName].forEach((blockRow) => {
Object.keys(rowToInsert.blocks).forEach((blockName) => {
rowToInsert.blocks[blockName].forEach((blockRow) => {
blockRow.row._parentID = insertedRow.id
if (!blocksToInsert[tableName]) {
blocksToInsert[tableName] = []
if (!blocksToInsert[blockName]) {
blocksToInsert[blockName] = []
}
if (blockRow.row.uuid) {
delete blockRow.row.uuid
}
blocksToInsert[tableName].push(blockRow)
blocksToInsert[blockName].push(blockRow)
})
})

@@ -258,11 +258,12 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}

if (operation === 'update') {
for (const tableName of rowToInsert.blocksToDelete) {
const blockTable = adapter.tables[tableName]
for (const blockName of rowToInsert.blocksToDelete) {
const blockTableName = adapter.tableNameMap.get(`${tableName}_blocks_${blockName}`)
const blockTable = adapter.tables[blockTableName]
await adapter.deleteWhere({
db,
tableName,
tableName: blockTableName,
where: eq(blockTable._parentID, insertedRow.id),
})
}
@@ -271,14 +272,15 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
// When versions are enabled, this is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
const arraysBlocksUUIDMap: Record<string, number | string> = {}

for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
insertedBlockRows[tableName] = await adapter.insert({
for (const [blockName, blockRows] of Object.entries(blocksToInsert)) {
const blockTableName = adapter.tableNameMap.get(`${tableName}_blocks_${blockName}`)
insertedBlockRows[blockName] = await adapter.insert({
db,
tableName,
tableName: blockTableName,
values: blockRows.map(({ row }) => row),
})

insertedBlockRows[tableName].forEach((row, i) => {
insertedBlockRows[blockName].forEach((row, i) => {
blockRows[i].row = row
if (
typeof row._uuid === 'string' &&
@@ -308,7 +310,7 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
if (blockLocaleRowsToInsert.length > 0) {
await adapter.insert({
db,
tableName: `${tableName}${adapter.localesSuffix}`,
tableName: `${blockTableName}${adapter.localesSuffix}`,
values: blockLocaleRowsToInsert,
})
}
@@ -317,7 +319,7 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
adapter,
arrays: blockRows.map(({ arrays }) => arrays),
db,
parentRows: insertedBlockRows[tableName],
parentRows: insertedBlockRows[blockName],
uuidMap: arraysBlocksUUIDMap,
})
}

@@ -46,7 +46,7 @@ export const insertArrays = async ({

// Add any sub arrays that need to be created
// We will call this recursively below
arrayRows.forEach((arrayRow) => {
arrayRows.forEach((arrayRow, i) => {
if (Object.keys(arrayRow.arrays).length > 0) {
rowsByTable[tableName].arrays.push(arrayRow.arrays)
}

@@ -12,7 +12,7 @@ type BaseArgs = {
* When true, skips reading the data back from the database and returns the input data
* @default false
*/
ignoreResult?: 'idOnly' | boolean
ignoreResult?: boolean | 'idOnly'
joinQuery?: JoinQuery
path?: string
req?: Partial<PayloadRequest>

@@ -7,11 +7,7 @@ export const createBlocksMap = (data: Record<string, unknown>): BlocksMap => {

Object.entries(data).forEach(([key, rows]) => {
if (key.startsWith('_blocks_') && Array.isArray(rows)) {
let blockType = key.replace('_blocks_', '')
const parsed = blockType.split('_')
if (parsed.length === 2 && Number.isInteger(Number(parsed[1]))) {
blockType = parsed[0]
}
const blockType = key.replace('_blocks_', '')

rows.forEach((row) => {
if ('_path' in row) {

@@ -267,11 +267,8 @@ declare module '${this.packageName}' {
*/
`

const importTypes = `import type {} from '${this.packageName}'`

let code = [
warning,
importTypes,
...importDeclarationsSanitized,
schemaDeclaration,
...enumDeclarations,
@@ -1,17 +0,0 @@
import type { ArrayField } from 'payload'

export const getArrayRelationName = ({
field,
path,
tableName,
}: {
field: ArrayField
path: string
tableName: string
}) => {
if (field.dbName && path.length > 63) {
return `_${tableName}`
}

return path
}
@@ -1,4 +1,4 @@
import { dequal } from 'dequal'
import { deepStrictEqual } from 'assert'
import prompts from 'prompts'

import type { BasePostgresAdapter } from '../postgres/types.js'
@@ -23,18 +23,18 @@ export const pushDevSchema = async (adapter: DrizzleAdapter) => {
const localeCodes =
adapter.payload.config.localization && adapter.payload.config.localization.localeCodes

const equal = dequal(previousSchema, {
localeCodes,
rawTables: adapter.rawTables,
})
try {
deepStrictEqual(previousSchema, {
localeCodes,
rawTables: adapter.rawTables,
})

if (equal) {
if (adapter.logger) {
adapter.payload.logger.info('No changes detected in schema, skipping schema push.')
}

return
} else {
} catch {
previousSchema.localeCodes = localeCodes
previousSchema.rawTables = adapter.rawTables
}

@@ -1,5 +1,6 @@
import type { Block, Field, FlattenedBlock } from 'payload'
import type { Block, Field } from 'payload'

import { InvalidConfiguration } from 'payload'
import {
fieldAffectsData,
fieldHasSubFields,
@@ -82,16 +83,14 @@ const getFlattenedFieldNames = (args: {
}, [])
}

/**
* returns true if all the fields in a block are identical to the existing table
*/
export const validateExistingBlockIsIdentical = ({
block,
localized,
parentIsLocalized,
rootTableName,
table,
tableLocales,
}: Args): boolean => {
}: Args): void => {
const fieldNames = getFlattenedFieldNames({
fields: block.fields,
parentIsLocalized: parentIsLocalized || localized,
@@ -111,21 +110,18 @@ export const validateExistingBlockIsIdentical = ({
})

if (missingField) {
return false
throw new InvalidConfiguration(
`The table ${rootTableName} has multiple blocks with slug ${
block.slug
}, but the schemas do not match. One block includes the field ${
typeof missingField === 'string' ? missingField : missingField.name
}, while the other block does not.`,
)
}

return Boolean(localized) === Boolean(table.columns._locale)
}

export const InternalBlockTableNameIndex = Symbol('InternalBlockTableNameIndex')
export const setInternalBlockIndex = (block: FlattenedBlock, index: number) => {
block[InternalBlockTableNameIndex] = index
}

export const resolveBlockTableName = (block: FlattenedBlock, originalTableName: string) => {
if (!block[InternalBlockTableNameIndex]) {
return originalTableName
if (Boolean(localized) !== Boolean(table.columns._locale)) {
throw new InvalidConfiguration(
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One is localized, but another is not. Block schemas of the same name must match exactly.`,
)
}

return `${originalTableName}_${block[InternalBlockTableNameIndex]}`
}
18 packages/email-nodemailer/eslint.config.js Normal file
@@ -0,0 +1,18 @@
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'

/** @typedef {import('eslint').Linter.Config} Config */

/** @type {Config[]} */
export const index = [
...rootEslintConfig,
{
languageOptions: {
parserOptions: {
...rootParserOptions,
tsconfigRootDir: import.meta.dirname,
},
},
},
]

export default index
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-nodemailer",
"version": "3.38.0",
"version": "3.36.1",
"description": "Payload Nodemailer Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

18 packages/email-resend/eslint.config.js Normal file
@@ -0,0 +1,18 @@
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'

/** @typedef {import('eslint').Linter.Config} Config */

/** @type {Config[]} */
export const index = [
...rootEslintConfig,
{
languageOptions: {
parserOptions: {
...rootParserOptions,
tsconfigRootDir: import.meta.dirname,
},
},
},
]

export default index
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-resend",
"version": "3.38.0",
"version": "3.36.1",
"description": "Payload Resend Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

@@ -10,7 +10,6 @@ import globals from 'globals'
import importX from 'eslint-plugin-import-x'
import typescriptParser from '@typescript-eslint/parser'
import { deepMerge } from './deepMerge.js'
import reactCompiler from 'eslint-plugin-react-compiler'

const baseRules = {
// This rule makes no sense when overriding class methods. This is used a lot in richtext-lexical.
@@ -126,52 +125,6 @@ export const rootEslintConfig = [
ecmaFeatures: {
jsx: true,
},
projectService: {
// This is necessary because `tsconfig.base.json` defines `"rootDir": "${configDir}/src"`,
// And the following files aren't in src because they aren't transpiled.
// This is typescript-eslint's way of adding files that aren't included in tsconfig.
// See: https://typescript-eslint.io/troubleshooting/typed-linting/#i-get-errors-telling-me--was-not-found-by-the-project-service-consider-either-including-it-in-the-tsconfigjson-or-including-it-in-allowdefaultproject
// The best practice is to have a tsconfig.json that covers ALL files and is used for
// typechecking (with noEmit), and a `tsconfig.build.json` that is used for the build
// (or alternatively, swc, tsup or tsdown). That's what we should ideally do, in which case
// this hardcoded list wouldn't be necessary. Note that these files don't currently go
// through ts, only through eslint.
allowDefaultProject: [
'../payload/bin.js',
'../payload/bundle.js',
'../next/babel.config.cjs',
'../next/bundleScss.js',
'../ui/babel.config.cjs',
'../ui/bundle.js',
'../graphql/bin.js',
'../richtext-lexical/babel.config.cjs',
'../richtext-lexical/bundle.js',
'../richtext-lexical/scripts/translateNewKeys.ts',
'../db-postgres/bundle.js',
'../db-postgres/relationships-v2-v3.mjs',
'../db-postgres/scripts/renamePredefinedMigrations.ts',
'../db-sqlite/bundle.js',
'../db-vercel-postgres/relationships-v2-v3.mjs',
'../db-vercel-postgres/scripts/renamePredefinedMigrations.ts',
'../plugin-cloud-storage/azure.d.ts',
'../plugin-cloud-storage/azure.js',
'../plugin-cloud-storage/gcs.d.ts',
'../plugin-cloud-storage/gcs.js',
'../plugin-cloud-storage/s3.d.ts',
'../plugin-cloud-storage/s3.js',
'../plugin-redirects/types.d.ts',
'../plugin-redirects/types.js',
'../translations/scripts/translateNewKeys/applyEslintFixes.ts',
'../translations/scripts/translateNewKeys/findMissingKeys.ts',
'../translations/scripts/translateNewKeys/generateTsObjectLiteral.ts',
'../translations/scripts/translateNewKeys/index.ts',
'../translations/scripts/translateNewKeys/run.ts',
'../translations/scripts/translateNewKeys/sortKeys.ts',
'../translations/scripts/translateNewKeys/translateText.ts',
'../create-payload-app/bin/cli.js',
],
},
tsconfigRootDir: import.meta.dirname,
},
ecmaVersion: 'latest',
sourceType: 'module',
@@ -253,10 +206,6 @@ export const rootEslintConfig = [
},
files: ['*.config.ts', 'config.ts'],
},
{
name: 'React Compiler',
...reactCompiler.configs.recommended,
},
]

export default rootEslintConfig

@@ -36,7 +36,6 @@
"eslint-plugin-jest-dom": "5.5.0",
"eslint-plugin-jsx-a11y": "6.10.2",
"eslint-plugin-perfectionist": "3.9.1",
"eslint-plugin-react-compiler": "19.0.0-beta-e993439-20250405",
"eslint-plugin-react-hooks": "0.0.0-experimental-d331ba04-20250307",
"eslint-plugin-regexp": "2.7.0",
"globals": "16.0.0",
18 packages/graphql/eslint.config.js Normal file
@@ -0,0 +1,18 @@
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'

/** @typedef {import('eslint').Linter.Config} Config */

/** @type {Config[]} */
export const index = [
...rootEslintConfig,
{
languageOptions: {
parserOptions: {
...rootParserOptions,
tsconfigRootDir: import.meta.dirname,
},
},
},
]

export default index
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/graphql",
"version": "3.38.0",
"version": "3.36.1",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

@@ -145,37 +145,27 @@ export function buildMutationInputType({
},
}),
group: (inputObjectTypeConfig: InputObjectTypeConfig, field: GroupField) => {
if (fieldAffectsData(field)) {
const requiresAtLeastOneField = groupOrTabHasRequiredSubfield(field)
const fullName = combineParentName(parentName, toWords(field.name, true))
let type: GraphQLType = buildMutationInputType({
name: fullName,
config,
fields: field.fields,
graphqlResult,
parentIsLocalized: parentIsLocalized || field.localized,
parentName: fullName,
})
const requiresAtLeastOneField = groupOrTabHasRequiredSubfield(field)
const fullName = combineParentName(parentName, toWords(field.name, true))
let type: GraphQLType = buildMutationInputType({
name: fullName,
config,
fields: field.fields,
graphqlResult,
parentIsLocalized: parentIsLocalized || field.localized,
parentName: fullName,
})

if (!type) {
return inputObjectTypeConfig
}
if (!type) {
return inputObjectTypeConfig
}

if (requiresAtLeastOneField) {
type = new GraphQLNonNull(type)
}
return {
...inputObjectTypeConfig,
[formatName(field.name)]: { type },
}
} else {
return field.fields.reduce((acc, subField: CollapsibleField) => {
const addSubField = fieldToSchemaMap[subField.type]
if (addSubField) {
return addSubField(acc, subField)
}
return acc
}, inputObjectTypeConfig)
if (requiresAtLeastOneField) {
type = new GraphQLNonNull(type)
}
return {
...inputObjectTypeConfig,
[formatName(field.name)]: { type },
}
},
json: (inputObjectTypeConfig: InputObjectTypeConfig, field: JSONField) => ({

@@ -10,11 +10,11 @@ export const buildPaginatedListType = (name, docType) =>
hasNextPage: { type: new GraphQLNonNull(GraphQLBoolean) },
hasPrevPage: { type: new GraphQLNonNull(GraphQLBoolean) },
limit: { type: new GraphQLNonNull(GraphQLInt) },
nextPage: { type: GraphQLInt },
nextPage: { type: new GraphQLNonNull(GraphQLInt) },
offset: { type: GraphQLInt },
page: { type: new GraphQLNonNull(GraphQLInt) },
pagingCounter: { type: new GraphQLNonNull(GraphQLInt) },
prevPage: { type: GraphQLInt },
prevPage: { type: new GraphQLNonNull(GraphQLInt) },
totalDocs: { type: new GraphQLNonNull(GraphQLInt) },
totalPages: { type: new GraphQLNonNull(GraphQLInt) },
},
@@ -41,7 +41,7 @@ import {
} from 'graphql'
import { DateTimeResolver, EmailAddressResolver } from 'graphql-scalars'
import { combineQueries, createDataloaderCacheKey, MissingEditorProp, toWords } from 'payload'
import { fieldAffectsData, tabHasName } from 'payload/shared'
import { tabHasName } from 'payload/shared'

import type { Context } from '../resolvers/types.js'

@@ -302,64 +302,44 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
field,
forceNullable,
graphqlResult,
newlyCreatedBlockType,
objectTypeConfig,
parentIsLocalized,
parentName,
}) => {
if (fieldAffectsData(field)) {
const interfaceName =
field?.interfaceName || combineParentName(parentName, toWords(field.name, true))
const interfaceName =
field?.interfaceName || combineParentName(parentName, toWords(field.name, true))

if (!graphqlResult.types.groupTypes[interfaceName]) {
const objectType = buildObjectType({
name: interfaceName,
config,
fields: field.fields,
forceNullable: isFieldNullable({ field, forceNullable, parentIsLocalized }),
graphqlResult,
parentIsLocalized: field.localized || parentIsLocalized,
parentName: interfaceName,
})
if (!graphqlResult.types.groupTypes[interfaceName]) {
const objectType = buildObjectType({
name: interfaceName,
config,
fields: field.fields,
forceNullable: isFieldNullable({ field, forceNullable, parentIsLocalized }),
graphqlResult,
parentIsLocalized: field.localized || parentIsLocalized,
parentName: interfaceName,
})

if (Object.keys(objectType.getFields()).length) {
graphqlResult.types.groupTypes[interfaceName] = objectType
}
if (Object.keys(objectType.getFields()).length) {
graphqlResult.types.groupTypes[interfaceName] = objectType
}
}

if (!graphqlResult.types.groupTypes[interfaceName]) {
return objectTypeConfig
}
if (!graphqlResult.types.groupTypes[interfaceName]) {
return objectTypeConfig
}

return {
...objectTypeConfig,
[formatName(field.name)]: {
type: graphqlResult.types.groupTypes[interfaceName],
resolve: (parent, args, context: Context) => {
return {
...parent[field.name],
_id: parent._id ?? parent.id,
}
},
return {
...objectTypeConfig,
[formatName(field.name)]: {
type: graphqlResult.types.groupTypes[interfaceName],
resolve: (parent, args, context: Context) => {
return {
...parent[field.name],
_id: parent._id ?? parent.id,
}
},
}
} else {
return field.fields.reduce((objectTypeConfigWithCollapsibleFields, subField) => {
const addSubField: GenericFieldToSchemaMap = fieldToSchemaMap[subField.type]
if (addSubField) {
return addSubField({
config,
field: subField,
forceNullable,
graphqlResult,
newlyCreatedBlockType,
objectTypeConfig: objectTypeConfigWithCollapsibleFields,
parentIsLocalized,
parentName,
})
}
return objectTypeConfigWithCollapsibleFields
}, objectTypeConfig)
},
}
},
join: ({ collectionSlug, field, graphqlResult, objectTypeConfig, parentName }) => {

@@ -17,7 +17,7 @@ export const recursivelyBuildNestedPaths = ({ field, nestedFieldName2, parentNam
if (field.type === 'tabs') {
// if the tab has a name, treat it as a group
// otherwise, treat it as a row
return field.tabs.reduce((tabSchema, tab: any) => {
return (field.tabs as Tab[]).reduce((tabSchema, tab: any) => {
tabSchema.push(
...recursivelyBuildNestedPaths({
field: {
18 packages/live-preview-react/eslint.config.js Normal file
@@ -0,0 +1,18 @@
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'

/** @typedef {import('eslint').Linter.Config} Config */

/** @type {Config[]} */
export const index = [
...rootEslintConfig,
{
languageOptions: {
parserOptions: {
...rootParserOptions,
tsconfigRootDir: import.meta.dirname,
},
},
},
]

export default index
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview-react",
"version": "3.38.0",
"version": "3.36.1",
"description": "The official React SDK for Payload Live Preview",
"homepage": "https://payloadcms.com",
"repository": {
Some files were not shown because too many files have changed in this diff.