Compare commits
85 Commits
v3.0.0-beta.88 ... v3.0.0-beta.96
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 2a8bd4c775 |  |
|  | ac10bad723 |  |
|  | 142616e6ad |  |
|  | dd3d985091 |  |
|  | de3d7c95e7 |  |
|  | 570422ff9a |  |
|  | 53c41bdfd8 |  |
|  | e5c34ead16 |  |
|  | 6e561b11ca |  |
|  | f7146362df |  |
|  | ec9d1cda2d |  |
|  | 657326b528 |  |
|  | 538b7ee616 |  |
|  | 828f5d866d |  |
|  | e375f6e727 |  |
|  | cc9b877e88 |  |
|  | dc12047723 |  |
|  | 12fb691e4f |  |
|  | 0962850086 |  |
|  | 78c8bb81a1 |  |
|  | 419b274bb1 |  |
|  | ef818fd5c8 |  |
|  | 0aaf3af1ea |  |
|  | 18b0806b5b |  |
|  | 3d9051ad34 |  |
|  | e4ef47b938 |  |
|  | c7e7dc71d3 |  |
|  | 375671c162 |  |
|  | 23b495b145 |  |
|  | 27d743e2a8 |  |
|  | 8c9ff3d54b |  |
|  | 5c447252e7 |  |
|  | a76be81368 |  |
|  | 5d97d57e70 |  |
|  | de7ff1f8c6 |  |
|  | 3d714d3e72 |  |
|  | 2bbb02b9c0 |  |
|  | 0533e7f5db |  |
|  | 23c5ef428d |  |
|  | f046a04510 |  |
|  | 4cda7d2363 |  |
|  | ea48cfbfe9 |  |
|  | 1aeb912762 |  |
|  | ce2cb35d71 |  |
|  | d3ec68ac2f |  |
|  | 05bf52aac3 |  |
|  | fed7f2fa5b |  |
|  | 686b0865b2 |  |
|  | dfb4c8eb4c |  |
|  | ad7a387e19 |  |
|  | d05be016ce |  |
|  | ec3bb71e7c |  |
|  | 825d8b83d1 |  |
|  | 83022f6d55 |  |
|  | 4bbc593dc5 |  |
|  | 03440f5eca |  |
|  | 0fa6611260 |  |
|  | a2d68f84e1 |  |
|  | 49c0709fed |  |
|  | 350a4a0718 |  |
|  | 6349cd42e9 |  |
|  | c2b2f10676 |  |
|  | 95ebead464 |  |
|  | 3eed8b11cb |  |
|  | 404008dc4e |  |
|  | c7c6fca537 |  |
|  | 9de3ffdcfe |  |
|  | 1eefb12070 |  |
|  | 2d8b752ef2 |  |
|  | 3e5c31a024 |  |
|  | 631431e006 |  |
|  | 492d920133 |  |
|  | f754edc375 |  |
|  | d2571e10d6 |  |
|  | a687cb9c5b |  |
|  | cf6634111f |  |
|  | 1ee19d3016 |  |
|  | 9beaa281dc |  |
|  | 5174c7092f |  |
|  | d894ac75f0 |  |
|  | af0105ced5 |  |
|  | 93e81314df |  |
|  | 163d1c85da |  |
|  | cb9b80aaf9 |  |
|  | cad1906725 |  |
.gitignore (vendored): 1 change
@@ -154,6 +154,7 @@ out
# Nuxt.js build / generate output
.nuxt
dist
dist_optimized

# Gatsby files
.cache/

@@ -31,7 +31,7 @@ The following options are available:
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| **`hooks`** | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| **`useAsTitle`** | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. |
| **`description`** | Text or React component to display below the Collection label in the List View to give editors more information. |
| **`description`** | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#components). |
| **`defaultColumns`** | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| **`hideAPIURL`** | Hides the "API URL" meta field while editing documents within this Collection. |
| **`enableRichTextLink`** | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
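
For orientation, here is a minimal sketch of a Collection Config using several of the `admin` options from the table above. The collection slug, field names, and description text are illustrative assumptions, not part of this diff:

```ts
import type { CollectionConfig } from 'payload'

// Sketch only: the slug, fields, and description text are made-up examples.
export const Posts: CollectionConfig = {
  slug: 'posts',
  admin: {
    useAsTitle: 'title', // use the `title` field as the document title in the Admin Panel
    description: 'Posts shown on the public site', // plain-text description under the label in the List View
    defaultColumns: ['title', 'updatedAt'], // columns shown by default in the List View
    hideAPIURL: true, // hide the "API URL" meta field while editing documents
  },
  fields: [{ name: 'title', type: 'text' }],
}
```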
@@ -69,7 +69,8 @@ The following options are available:
| **`beforeList`** | An array of components to inject _before_ the built-in List View |
| **`beforeListTable`** | An array of components to inject _before_ the built-in List View's table |
| **`afterList`** | An array of components to inject _after_ the built-in List View |
| **`afterListTable`** | An array of components to inject _after_ the built-in List View's table |
| **`afterListTable`** | An array of components to inject _after_ the built-in List View's table |
| **`Description`** | A component to render below the Collection label in the List View. An alternative to the `admin.description` property. |
| **`edit.SaveButton`** | Replace the default Save Button with a Custom Component. [Drafts](../versions/drafts) must be disabled. |
| **`edit.SaveDraftButton`** | Replace the default Save Draft Button with a Custom Component. [Drafts](../versions/drafts) must be enabled and autosave must be disabled. |
| **`edit.PublishButton`** | Replace the default Publish Button with a Custom Component. [Drafts](../versions/drafts) must be enabled. |

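As a rough illustration of how these injection points are wired up, the sketch below registers Custom Components by `path#ExportName` string, the same convention used elsewhere in this compare. The slug and component paths are hypothetical:

```ts
import type { CollectionConfig } from 'payload'

// Sketch only: the slug and component paths are hypothetical examples.
export const Orders: CollectionConfig = {
  slug: 'orders',
  admin: {
    components: {
      // Arrays of components rendered around the built-in List View table
      beforeListTable: ['/components/OrdersFilters#OrdersFilters'],
      afterListTable: ['/components/OrdersSummary#OrdersSummary'],
      // Component rendered below the Collection label in the List View
      Description: '/components/OrdersDescription#OrdersDescription',
      edit: {
        // Replaces the default Save Button (drafts disabled for this collection)
        SaveButton: '/components/OrdersSaveButton#OrdersSaveButton',
      },
    },
  },
  fields: [],
}
```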
@@ -196,6 +196,48 @@ import { MyFieldComponent } from 'my-external-package/client'

which is a valid way to access MyFieldComponent that can be resolved by the consuming project.

### Custom Components from unknown locations

By default, any component paths from known locations are added to the import map. However, if you need to add any components from unknown locations to the import map, you can do so by adding them to the `admin.dependencies` array in your Payload Config. This is mostly only relevant for plugin authors and not for regular Payload users.

Example:

```ts
export default {
  // ...
  admin: {
    // ...
    dependencies: {
      myTestComponent: { // myTestComponent is the key - can be anything
        path: '/components/TestComponent.js#TestComponent',
        type: 'component',
        clientProps: {
          test: 'hello',
        },
      },
    },
  }
}
```

This way, `TestComponent` is added to the import map, no matter if it's referenced in a known location or not. On the client, you can then use the component like this:

```tsx
'use client'

import { RenderComponent, useConfig } from '@payloadcms/ui'
import React from 'react'

export const CustomView = () => {
  const { config } = useConfig()
  return (
    <div>
      <RenderComponent mappedComponent={config.admin.dependencies?.myTestComponent} />
    </div>
  )
}
```

## Root Components

Root Components are those that affect the [Admin Panel](./overview) generally, such as the logo or the main nav.

@@ -36,7 +36,6 @@ Here is one of the simplest possible Payload configs:
```ts
import { buildConfig } from 'payload'
import { mongooseAdapter } from '@payloadcms/db-mongodb'
// import { postgresAdapter } from '@payloadcms/db-postgres'

export default buildConfig({
  secret: process.env.PAYLOAD_SECRET,
```
@@ -72,6 +71,7 @@ The following options are available:
| **`db`** \* | The Database Adapter which will be used by Payload. [More details](../database/overview). |
| **`serverURL`** | A string used to define the absolute URL of your app. This includes the protocol, for example `https://example.com`. No paths allowed, only protocol, domain and (optionally) port. |
| **`collections`** | An array of Collections for Payload to manage. [More details](./collections). |
| **`compatibility`** | Compatibility flags for earlier versions of Payload. [More details](#compatibility-flags). |
| **`globals`** | An array of Globals for Payload to manage. [More details](./globals). |
| **`cors`** | Cross-origin resource sharing (CORS) is a mechanism that accepts incoming requests from given domains. You can also customize the `Access-Control-Allow-Headers` header. [More details](#cors). |
| **`localization`** | Opt-in to translate your content into multiple locales. [More details](./localization). |
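
To make the table above concrete, here is a hedged sketch of how a few of these root options might sit together in `buildConfig`. The domain, locales, environment variable names, and adapter choice are placeholder assumptions:

```ts
import { buildConfig } from 'payload'
import { mongooseAdapter } from '@payloadcms/db-mongodb'

// Sketch only: URLs, locales, and env var names are illustrative.
export default buildConfig({
  secret: process.env.PAYLOAD_SECRET,
  db: mongooseAdapter({ url: process.env.DATABASE_URI }),
  serverURL: 'https://example.com', // protocol + domain (+ optional port), no path
  cors: ['https://example.com'], // domains allowed to make cross-origin requests
  localization: {
    locales: ['en', 'es'],
    defaultLocale: 'en',
  },
  collections: [],
})
```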
@@ -254,3 +254,13 @@ import type { Config, SanitizedConfig } from 'payload'
The Payload Config only lives on the server and is not allowed to contain any client-side code. That way, you can load up the Payload Config in any server environment or standalone script, without having to use Bundlers or Node.js loaders to handle importing client-only modules (e.g. scss files or React Components) without any errors.

Behind the curtains, the Next.js-based Admin Panel generates a ClientConfig, which strips away any server-only code and enriches the config with React Components.

## Compatibility flags

The Payload Config can accept compatibility flags for running the newest versions but with older databases. You should only use these flags if you need to, and should confirm that you need to prior to enabling these flags.

`allowLocalizedWithinLocalized`

Payload localization works on a field-by-field basis. As you can nest fields within other fields, you could potentially nest a localized field within a localized field—but this would be redundant and unnecessary. There would be no reason to define a localized field within a localized parent field, given that the entire data structure from the parent field onward would be localized.

By default, Payload will remove the `localized: true` property from sub-fields if a parent field is localized. Set this compatibility flag to `true` only if you have an existing Payload MongoDB database from pre-3.0, and you have nested localized fields that you would like to maintain without migrating.

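Since the options table earlier lists `compatibility` as a root config option and `allowLocalizedWithinLocalized` is the flag described here, enabling it would presumably look like the following sketch (the key placement is assumed from those two pieces of the docs):

```ts
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
  compatibility: {
    // Keep nested `localized: true` sub-fields as-is for a pre-3.0 MongoDB database
    allowLocalizedWithinLocalized: true,
  },
})
```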
@@ -8,18 +8,20 @@ keywords: Postgres, documentation, typescript, Content Management System, cms, h

To use Payload with Postgres, install the package `@payloadcms/db-postgres`. It leverages Drizzle ORM and `node-postgres` to interact with a Postgres database that you provide.

Alternatively, the `@payloadcms/db-vercel-postgres` package is also available and is optimized for use with Vercel.

It automatically manages changes to your database for you in development mode, and exposes a full suite of migration controls for you to leverage in order to keep other database environments in sync with your schema. DDL transformations are automatically generated.

To configure Payload to use Postgres, pass the `postgresAdapter` to your Payload Config as follows:

### Usage

`@payloadcms/db-postgres`:

```ts
import { postgresAdapter } from '@payloadcms/db-postgres'

export default buildConfig({
  // Your config goes here
  collections: [
    // Collections go here
  ],
  // Configure the Postgres adapter here
  db: postgresAdapter({
    // Postgres-specific arguments go here.
@@ -31,11 +33,28 @@ export default buildConfig({
})
```

`@payloadcms/db-vercel-postgres`:

```ts
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export default buildConfig({
  // Automatically uses process.env.POSTGRES_URL if no options are provided.
  db: vercelPostgresAdapter(),
  // Optionally, can accept the same options as the @vercel/postgres package.
  db: vercelPostgresAdapter({
    pool: {
      connectionString: process.env.DATABASE_URL
    },
  }),
})
```

## Options

| Option | Description |
|-----------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `pool` \* | [Pool connection options](https://orm.drizzle.team/docs/quick-postgresql/node-postgres) that will be passed to Drizzle and `node-postgres`. |
| --------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `pool` \* | [Pool connection options](https://orm.drizzle.team/docs/quick-postgresql/node-postgres) that will be passed to Drizzle and `node-postgres` or to `@vercel/postgres` |
| `push` | Disable Drizzle's [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push) in development mode. By default, `push` is enabled for development mode only. |
| `migrationDir` | Customize the directory that migrations are stored. |
| `logger` | The instance of the logger to be passed to Drizzle. By default, Payload's logger will be used. |

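Pulling the options table together, a configuration that exercises most of these options might look roughly like the sketch below. The connection string variable, migration directory, and the choice to disable `push` are assumptions for illustration:

```ts
import { buildConfig } from 'payload'
import { postgresAdapter } from '@payloadcms/db-postgres'

// Sketch only: env var name, directory, and push setting are illustrative.
export default buildConfig({
  // ...
  db: postgresAdapter({
    pool: {
      // Passed through to Drizzle / node-postgres
      connectionString: process.env.DATABASE_URL,
    },
    push: false, // opt out of Drizzle's dev-mode `db push` and rely on migrations instead
    migrationDir: './src/migrations', // where generated migrations are stored
  }),
})
```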
@@ -162,7 +162,7 @@ All of Payload's GraphQL functionality is abstracted into a separate package. Pa

This is the UI library that Payload's Admin Panel uses. All components are exported from this package and can be re-used as you build extensions to the Payload admin UI, or want to use Payload components in your own React apps. Some exports are server components and some are client components.

`@payloadcms/db-postgres`, `@payloadcms/db-mongodb`
`@payloadcms/db-postgres`, `@payloadcms/db-vercel-postgres`, `@payloadcms/db-mongodb`

You can choose which Database Adapter you'd like to use for your project, and no matter which you choose, the entire data layer for Payload is contained within these packages. You can only use one at a time for any given project.

@@ -18,6 +18,7 @@ IMPORTANT: This will overwrite all slate data. We recommend doing the following
1. Take a backup of your entire database. If anything goes wrong and you do not have a backup, you are on your own and will not receive any support.
2. Make every richText field a lexical editor. This script will only convert lexical richText fields with old Slate data.
3. Add the SlateToLexicalFeature (as seen below) first, and test it out by loading up the Admin Panel, to see if the migrator works as expected. You might have to build some custom converters for some fields first in order to convert custom Slate nodes. The SlateToLexicalFeature is where the converters are stored. Only fields with this feature added will be migrated.
4. If this works as expected, add the `disableHooks: true` prop everywhere you're initializing `SlateToLexicalFeature`. Example: `SlateToLexicalFeature({ disableHooks: true })`. Once you've done that, you're ready to run the migration script, as sketched below.

```ts
import { migrateSlateToLexical } from '@payloadcms/richtext-lexical/migrate'
```
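The hunk above only shows the import, so the following is a rough sketch of a standalone migration script, assuming `migrateSlateToLexical` accepts an initialized Payload instance (not confirmed by this diff; verify against the `@payloadcms/richtext-lexical` docs first):

```ts
import { migrateSlateToLexical } from '@payloadcms/richtext-lexical/migrate'
import { getPayload } from 'payload'
import config from './payload.config'

// Assumption: migrateSlateToLexical takes a `payload` instance.
// Back up your database before running; this overwrites Slate data.
const run = async () => {
  const payload = await getPayload({ config })
  await migrateSlateToLexical({ payload })
  process.exit(0)
}

void run()
```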
@@ -92,6 +92,7 @@ _An asterisk denotes that an option is required._
| Option | Description |
| ------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`adminThumbnail`** | Set the way that the [Admin Panel](../admin/overview) will display thumbnails for this Collection. [More](#admin-thumbnails) |
| **`bulkUpload`** | Allow users to upload in bulk from the list view, default is true |
| **`crop`** | Set to `false` to disable the cropping tool in the [Admin Panel](../admin/overview). Crop is enabled by default. [More](#crop-and-focal-point-selector) |
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
| **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). |

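These options all live under a Collection's `upload` key. A minimal sketch follows, assuming a hypothetical `media` collection and a `thumbnail` image size that would need to exist in your own config:

```ts
import type { CollectionConfig } from 'payload'

// Sketch only: the slug and the 'thumbnail' size name are assumptions.
export const Media: CollectionConfig = {
  slug: 'media',
  upload: {
    adminThumbnail: 'thumbnail', // which generated size the Admin Panel uses for thumbnails
    bulkUpload: true, // allow multi-file upload from the List View (the default)
    crop: false, // disable the cropping tool
    displayPreview: true, // show a preview in related Upload fields
  },
  fields: [],
}
```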
@@ -40,6 +40,7 @@ export const rootEslintConfig = [
|
||||
{
|
||||
ignores: [
|
||||
...defaultESLintIgnores,
|
||||
'packages/eslint-*/**',
|
||||
'test/live-preview/next-app',
|
||||
'packages/**/*.spec.ts',
|
||||
'templates/**',
|
||||
|
||||
@@ -5,6 +5,8 @@ import config from '@payload-config'
|
||||
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
|
||||
import { NotFoundPage, generatePageMetadata } from '@payloadcms/next/views'
|
||||
|
||||
import { importMap } from '../importMap.js'
|
||||
|
||||
type Args = {
|
||||
params: {
|
||||
segments: string[]
|
||||
@@ -17,6 +19,7 @@ type Args = {
|
||||
export const generateMetadata = ({ params, searchParams }: Args): Promise<Metadata> =>
|
||||
generatePageMetadata({ config, params, searchParams })
|
||||
|
||||
const NotFound = ({ params, searchParams }: Args) => NotFoundPage({ config, params, searchParams })
|
||||
const NotFound = ({ params, searchParams }: Args) =>
|
||||
NotFoundPage({ config, importMap, params, searchParams })
|
||||
|
||||
export default NotFound
|
||||
|
||||
@@ -5,6 +5,8 @@ import config from '@payload-config'
|
||||
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
|
||||
import { RootPage, generatePageMetadata } from '@payloadcms/next/views'
|
||||
|
||||
import { importMap } from '../importMap.js'
|
||||
|
||||
type Args = {
|
||||
params: {
|
||||
segments: string[]
|
||||
@@ -17,6 +19,7 @@ type Args = {
|
||||
export const generateMetadata = ({ params, searchParams }: Args): Promise<Metadata> =>
|
||||
generatePageMetadata({ config, params, searchParams })
|
||||
|
||||
const Page = ({ params, searchParams }: Args) => RootPage({ config, params, searchParams })
|
||||
const Page = ({ params, searchParams }: Args) =>
|
||||
RootPage({ config, importMap, params, searchParams })
|
||||
|
||||
export default Page
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
import { TenantFieldComponent as TenantFieldComponent_0 } from '@/fields/TenantField/components/Field'
|
||||
import { TenantSelectorRSC as TenantSelectorRSC_1 } from '@/components/TenantSelector'
|
||||
|
||||
export const importMap = {
|
||||
'@/fields/TenantField/components/Field#TenantFieldComponent': TenantFieldComponent_0,
|
||||
'@/components/TenantSelector#TenantSelectorRSC': TenantSelectorRSC_1,
|
||||
}
|
||||
@@ -1,18 +1,21 @@
|
||||
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
|
||||
import configPromise from "@payload-config";
|
||||
import "@payloadcms/next/css";
|
||||
import { RootLayout } from "@payloadcms/next/layouts";
|
||||
import configPromise from '@payload-config'
|
||||
import '@payloadcms/next/css'
|
||||
import { RootLayout } from '@payloadcms/next/layouts'
|
||||
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
|
||||
import React from "react";
|
||||
import React from 'react'
|
||||
|
||||
import "./custom.scss";
|
||||
import { importMap } from './admin/importMap.js'
|
||||
import './custom.scss'
|
||||
|
||||
type Args = {
|
||||
children: React.ReactNode;
|
||||
};
|
||||
children: React.ReactNode
|
||||
}
|
||||
|
||||
const Layout = ({ children }: Args) => (
|
||||
<RootLayout config={configPromise}>{children}</RootLayout>
|
||||
);
|
||||
<RootLayout config={configPromise} importMap={importMap}>
|
||||
{children}
|
||||
</RootLayout>
|
||||
)
|
||||
|
||||
export default Layout;
|
||||
export default Layout
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { Access } from 'payload'
|
||||
|
||||
import type { User } from '../../../../payload-types'
|
||||
import type { User } from '../../../payload-types'
|
||||
|
||||
import { isSuperAdmin } from '../../../access/isSuperAdmin'
|
||||
import { getTenantAdminTenantAccessIDs } from '../../../utilities/getTenantAccessIDs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
import type { User } from '../../../payload-types'
|
||||
import type { User } from '../../payload-types'
|
||||
|
||||
import { getTenantAdminTenantAccessIDs } from '../../utilities/getTenantAccessIDs'
|
||||
import { createAccess } from './access/create'
|
||||
@@ -37,32 +37,6 @@ const Users: CollectionConfig = {
|
||||
{
|
||||
name: 'tenant',
|
||||
type: 'relationship',
|
||||
filterOptions: ({ user }) => {
|
||||
if (!user) {
|
||||
// Would like to query where exists true on id
|
||||
// but that is not working
|
||||
return {
|
||||
id: {
|
||||
like: '',
|
||||
},
|
||||
}
|
||||
}
|
||||
if (user?.roles?.includes('super-admin')) {
|
||||
// Would like to query where exists true on id
|
||||
// but that is not working
|
||||
return {
|
||||
id: {
|
||||
like: '',
|
||||
},
|
||||
}
|
||||
}
|
||||
const adminTenantAccessIDs = getTenantAdminTenantAccessIDs(user as User)
|
||||
return {
|
||||
id: {
|
||||
in: adminTenantAccessIDs,
|
||||
},
|
||||
}
|
||||
},
|
||||
index: true,
|
||||
relationTo: 'tenants',
|
||||
required: true,
|
||||
|
||||
@@ -2,17 +2,18 @@
|
||||
import type { Option } from '@payloadcms/ui/elements/ReactSelect'
|
||||
import type { OptionObject } from 'payload'
|
||||
|
||||
import { getTenantAdminTenantAccessIDs } from '@/utilities/getTenantAccessIDs'
|
||||
import { SelectInput, useAuth } from '@payloadcms/ui'
|
||||
import * as qs from 'qs-esm'
|
||||
import React from 'react'
|
||||
|
||||
import type { Tenant, User } from '../../../payload-types.js'
|
||||
import type { Tenant, User } from '../../payload-types'
|
||||
|
||||
import './index.scss'
|
||||
|
||||
export const TenantSelector = ({ initialCookie }: { initialCookie?: string }) => {
|
||||
const { user } = useAuth<User>()
|
||||
const [options, setOptions] = React.useState<OptionObject[]>([])
|
||||
const [value, setValue] = React.useState<string | undefined>(initialCookie)
|
||||
|
||||
const isSuperAdmin = user?.roles?.includes('super-admin')
|
||||
const tenantIDs =
|
||||
@@ -28,18 +29,6 @@ export const TenantSelector = ({ initialCookie }: { initialCookie?: string }) =>
|
||||
document.cookie = name + '=' + (value || '') + expires + '; path=/'
|
||||
}
|
||||
|
||||
React.useEffect(() => {
|
||||
const fetchTenants = async () => {
|
||||
const res = await fetch(`/api/tenants?depth=0&limit=100&sort=name`, {
|
||||
credentials: 'include',
|
||||
}).then((res) => res.json())
|
||||
|
||||
setOptions(res.docs.map((doc: Tenant) => ({ label: doc.name, value: doc.id })))
|
||||
}
|
||||
|
||||
void fetchTenants()
|
||||
}, [])
|
||||
|
||||
const handleChange = React.useCallback((option: Option | Option[]) => {
|
||||
if (!option) {
|
||||
setCookie('payload-tenant', undefined)
|
||||
@@ -50,7 +39,44 @@ export const TenantSelector = ({ initialCookie }: { initialCookie?: string }) =>
|
||||
}
|
||||
}, [])
|
||||
|
||||
if (isSuperAdmin || tenantIDs.length > 1) {
|
||||
React.useEffect(() => {
|
||||
const fetchTenants = async () => {
|
||||
const adminOfTenants = getTenantAdminTenantAccessIDs(user ?? null)
|
||||
|
||||
const queryString = qs.stringify(
|
||||
{
|
||||
depth: 0,
|
||||
limit: 100,
|
||||
sort: 'name',
|
||||
where: {
|
||||
id: {
|
||||
in: adminOfTenants,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
addQueryPrefix: true,
|
||||
},
|
||||
)
|
||||
|
||||
const res = await fetch(`/api/tenants${queryString}`, {
|
||||
credentials: 'include',
|
||||
}).then((res) => res.json())
|
||||
|
||||
const optionsToSet = res.docs.map((doc: Tenant) => ({ label: doc.name, value: doc.id }))
|
||||
|
||||
if (optionsToSet.length === 1) {
|
||||
setCookie('payload-tenant', optionsToSet[0].value)
|
||||
}
|
||||
setOptions(optionsToSet)
|
||||
}
|
||||
|
||||
if (user) {
|
||||
void fetchTenants()
|
||||
}
|
||||
}, [user])
|
||||
|
||||
if ((isSuperAdmin || tenantIDs.length > 1) && options.length > 1) {
|
||||
return (
|
||||
<div className="tenant-selector">
|
||||
<SelectInput
|
||||
@@ -59,7 +85,7 @@ export const TenantSelector = ({ initialCookie }: { initialCookie?: string }) =>
|
||||
onChange={handleChange}
|
||||
options={options}
|
||||
path="setTenant"
|
||||
value={value}
|
||||
value={options.find((opt) => opt.value === initialCookie)?.value}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
'use client'
|
||||
import { RelationshipField, useField } from '@payloadcms/ui'
|
||||
import React from 'react'
|
||||
|
||||
type Props = {
|
||||
initialValue?: string
|
||||
path: string
|
||||
readOnly: boolean
|
||||
}
|
||||
export function TenantFieldComponentClient({ initialValue, path, readOnly }: Props) {
|
||||
const { formInitializing, setValue } = useField({ path })
|
||||
const hasSetInitialValue = React.useRef(false)
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!hasSetInitialValue.current && !formInitializing && initialValue) {
|
||||
setValue(initialValue)
|
||||
hasSetInitialValue.current = true
|
||||
}
|
||||
}, [initialValue, setValue, formInitializing])
|
||||
|
||||
return (
|
||||
<RelationshipField
|
||||
field={{
|
||||
name: path,
|
||||
type: 'relationship',
|
||||
_path: path,
|
||||
label: 'Tenant',
|
||||
relationTo: 'tenants',
|
||||
required: true,
|
||||
}}
|
||||
readOnly={readOnly}
|
||||
/>
|
||||
)
|
||||
}
|
||||
@@ -1,26 +1,32 @@
|
||||
'use client'
|
||||
import { RelationshipField, useAuth, useFieldProps } from '@payloadcms/ui'
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
import { cookies as getCookies, headers as getHeaders } from 'next/headers'
|
||||
import React from 'react'
|
||||
|
||||
import type { User } from '../../../../payload-types.js'
|
||||
import { TenantFieldComponentClient } from './Field.client'
|
||||
|
||||
export const TenantFieldComponent = () => {
|
||||
const { user } = useAuth<User>()
|
||||
const { path, readOnly } = useFieldProps()
|
||||
export const TenantFieldComponent: React.FC<{
|
||||
path: string
|
||||
payload: Payload
|
||||
readOnly: boolean
|
||||
}> = async (args) => {
|
||||
const cookies = getCookies()
|
||||
const headers = getHeaders()
|
||||
const { user } = await args.payload.auth({ headers })
|
||||
|
||||
if (user) {
|
||||
if ((user.tenants && user.tenants.length > 1) || user?.roles?.includes('super-admin')) {
|
||||
return (
|
||||
<RelationshipField
|
||||
label="Tenant"
|
||||
name={path}
|
||||
path={path}
|
||||
readOnly={readOnly}
|
||||
relationTo="tenants"
|
||||
required
|
||||
/>
|
||||
)
|
||||
}
|
||||
if (
|
||||
user &&
|
||||
((Array.isArray(user.tenants) && user.tenants.length > 1) ||
|
||||
user?.roles?.includes('super-admin'))
|
||||
) {
|
||||
return (
|
||||
<TenantFieldComponentClient
|
||||
initialValue={cookies.get('payload-tenant')?.value || undefined}
|
||||
path={args.path}
|
||||
readOnly={args.readOnly}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ import type { Field } from 'payload'
|
||||
|
||||
import { isSuperAdmin } from '../../access/isSuperAdmin'
|
||||
import { tenantFieldUpdate } from './access/update'
|
||||
import { TenantFieldComponent } from './components/Field'
|
||||
import { autofillTenant } from './hooks/autofillTenant'
|
||||
|
||||
export const tenantField: Field = {
|
||||
@@ -17,7 +16,7 @@ export const tenantField: Field = {
|
||||
},
|
||||
admin: {
|
||||
components: {
|
||||
Field: TenantFieldComponent,
|
||||
Field: '@/fields/TenantField/components/Field#TenantFieldComponent',
|
||||
},
|
||||
position: 'sidebar',
|
||||
},
|
||||
|
||||
@@ -17,6 +17,9 @@ export interface Config {
|
||||
'payload-preferences': PayloadPreference;
|
||||
'payload-migrations': PayloadMigration;
|
||||
};
|
||||
db: {
|
||||
defaultIDType: string;
|
||||
};
|
||||
globals: {};
|
||||
locale: null;
|
||||
user: User & {
|
||||
@@ -26,15 +29,20 @@ export interface Config {
|
||||
export interface UserAuthOperations {
|
||||
forgotPassword: {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
login: {
|
||||
password: string;
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
registerFirstUser: {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
unlock: {
|
||||
email: string;
|
||||
password: string;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
|
||||
@@ -7,7 +7,6 @@ import { fileURLToPath } from 'url'
|
||||
import { Pages } from './collections/Pages'
|
||||
import { Tenants } from './collections/Tenants'
|
||||
import Users from './collections/Users'
|
||||
import { TenantSelectorRSC } from './components/TenantSelector'
|
||||
|
||||
const filename = fileURLToPath(import.meta.url)
|
||||
const dirname = path.dirname(filename)
|
||||
@@ -15,7 +14,7 @@ const dirname = path.dirname(filename)
|
||||
export default buildConfig({
|
||||
admin: {
|
||||
components: {
|
||||
afterNavLinks: [TenantSelectorRSC],
|
||||
afterNavLinks: ['@/components/TenantSelector#TenantSelectorRSC'],
|
||||
},
|
||||
user: 'users',
|
||||
},
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { User } from '../../payload-types'
|
||||
import type { User } from '../payload-types'
|
||||
|
||||
export const getTenantAccessIDs = (user: User | null): string[] => {
|
||||
if (!user) return []
|
||||
|
||||
File diff suppressed because it is too large
@@ -23,7 +23,7 @@ export default withBundleAnalyzer(
|
||||
env: {
|
||||
PAYLOAD_CORE_DEV: 'true',
|
||||
ROOT_DIR: path.resolve(dirname),
|
||||
PAYLOAD_DISABLE_DEPENDENCY_CHECKER: 'true',
|
||||
PAYLOAD_CI_DEPENDENCY_CHECKER: 'true',
|
||||
},
|
||||
async redirects() {
|
||||
return [
|
||||
|
||||
package.json: 81 changes
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload-monorepo",
|
||||
"version": "3.0.0-beta.88",
|
||||
"version": "3.0.0-beta.96",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -10,68 +10,70 @@
|
||||
"build:app": "next build",
|
||||
"build:app:analyze": "cross-env ANALYZE=true next build",
|
||||
"build:clean": "pnpm clean:build",
|
||||
"build:core": "turbo build --filter \"!@payloadcms/plugin-*\"",
|
||||
"build:core:force": "pnpm clean:build && turbo build --filter \"!@payloadcms/plugin-*\" --no-cache --force",
|
||||
"build:core": "turbo build --filter \"!@payloadcms/plugin-*\" --filter \"!@payloadcms/storage-*\"",
|
||||
"build:core:force": "pnpm clean:build && pnpm build:core --no-cache --force",
|
||||
"build:create-payload-app": "turbo build --filter create-payload-app",
|
||||
"build:db-mongodb": "turbo build --filter db-mongodb",
|
||||
"build:db-postgres": "turbo build --filter db-postgres",
|
||||
"build:db-sqlite": "turbo build --filter db-sqlite",
|
||||
"build:drizzle": "turbo build --filter drizzle",
|
||||
"build:email-nodemailer": "turbo build --filter email-nodemailer",
|
||||
"build:email-resend": "turbo build --filter email-resend",
|
||||
"build:eslint-config": "turbo build --filter eslint-config",
|
||||
"build:db-mongodb": "turbo build --filter \"@payloadcms/db-mongodb\"",
|
||||
"build:db-postgres": "turbo build --filter \"@payloadcms/db-postgres\"",
|
||||
"build:db-sqlite": "turbo build --filter \"@payloadcms/db-sqlite\"",
|
||||
"build:db-vercel-postgres": "turbo build --filter \"@payloadcms/db-vercel-postgres\"",
|
||||
"build:drizzle": "turbo build --filter \"@payloadcms/drizzle\"",
|
||||
"build:email-nodemailer": "turbo build --filter \"@payloadcms/email-nodemailer\"",
|
||||
"build:email-resend": "turbo build --filter \"@payloadcms/email-resend\"",
|
||||
"build:eslint-config": "turbo build --filter \"@payloadcms/eslint-config\"",
|
||||
"build:essentials:force": "pnpm clean:build && turbo build --filter=\"payload...\" --filter=\"@payloadcms/ui\" --filter=\"@payloadcms/next\" --filter=\"@payloadcms/db-mongodb\" --filter=\"@payloadcms/db-postgres\" --filter=\"@payloadcms/richtext-lexical\" --filter=\"@payloadcms/translations\" --filter=\"@payloadcms/plugin-cloud\" --filter=\"@payloadcms/graphql\" --no-cache --force",
|
||||
"build:force": "pnpm run build:core:force",
|
||||
"build:graphql": "turbo build --filter graphql",
|
||||
"build:live-preview": "turbo build --filter live-preview",
|
||||
"build:live-preview-react": "turbo build --filter live-preview-react",
|
||||
"build:live-preview-vue": "turbo build --filter live-preview-vue",
|
||||
"build:next": "turbo build --filter next",
|
||||
"build:graphql": "turbo build --filter \"@payloadcms/graphql\"",
|
||||
"build:live-preview": "turbo build --filter \"@payloadcms/live-preview\"",
|
||||
"build:live-preview-react": "turbo build --filter \"@payloadcms/live-preview-react\"",
|
||||
"build:live-preview-vue": "turbo build --filter \"@payloadcms/live-preview-vue\"",
|
||||
"build:next": "turbo build --filter \"@payloadcms/next\"",
|
||||
"build:payload": "turbo build --filter payload",
|
||||
"build:plugin-cloud": "turbo build --filter plugin-cloud",
|
||||
"build:plugin-cloud-storage": "turbo build --filter plugin-cloud-storage",
|
||||
"build:plugin-form-builder": "turbo build --filter plugin-form-builder",
|
||||
"build:plugin-nested-docs": "turbo build --filter plugin-nested-docs",
|
||||
"build:plugin-redirects": "turbo build --filter plugin-redirects",
|
||||
"build:plugin-relationship-object-ids": "turbo build --filter plugin-relationship-object-ids",
|
||||
"build:plugin-search": "turbo build --filter plugin-search",
|
||||
"build:plugin-sentry": "turbo build --filter plugin-sentry",
|
||||
"build:plugin-seo": "turbo build --filter plugin-seo",
|
||||
"build:plugin-stripe": "turbo build --filter plugin-stripe",
|
||||
"build:plugin-cloud": "turbo build --filter \"@payloadcms/plugin-cloud\"",
|
||||
"build:plugin-cloud-storage": "turbo build --filter \"@payloadcms/plugin-cloud-storage\"",
|
||||
"build:plugin-form-builder": "turbo build --filter \"@payloadcms/plugin-form-builder\"",
|
||||
"build:plugin-nested-docs": "turbo build --filter \"@payloadcms/plugin-nested-docs\"",
|
||||
"build:plugin-redirects": "turbo build --filter \"@payloadcms/plugin-redirects\"",
|
||||
"build:plugin-relationship-object-ids": "turbo build --filter \"@payloadcms/plugin-relationship-object-ids\"",
|
||||
"build:plugin-search": "turbo build --filter \"@payloadcms/plugin-search\"",
|
||||
"build:plugin-sentry": "turbo build --filter \"@payloadcms/plugin-sentry\"",
|
||||
"build:plugin-seo": "turbo build --filter \"@payloadcms/plugin-seo\"",
|
||||
"build:plugin-stripe": "turbo build --filter \"@payloadcms/plugin-stripe\"",
|
||||
"build:plugins": "turbo build --filter \"@payloadcms/plugin-*\"",
|
||||
"build:richtext-lexical": "turbo build --filter richtext-lexical",
|
||||
"build:richtext-slate": "turbo build --filter richtext-slate",
|
||||
"build:storage-azure": "turbo build --filter storage-azure",
|
||||
"build:storage-gcs": "turbo build --filter storage-gcs",
|
||||
"build:storage-s3": "turbo build --filter storage-s3",
|
||||
"build:storage-uploadthing": "turbo build --filter storage-uploadthing",
|
||||
"build:storage-vercel-blob": "turbo build --filter storage-vercel-blob",
|
||||
"build:richtext-lexical": "turbo build --filter \"@payloadcms/richtext-lexical\"",
|
||||
"build:richtext-slate": "turbo build --filter \"@payloadcms/richtext-slate\"",
|
||||
"build:storage-azure": "turbo build --filter \"@payloadcms/storage-azure\"",
|
||||
"build:storage-gcs": "turbo build --filter \"@payloadcms/storage-gcs\"",
|
||||
"build:storage-s3": "turbo build --filter \"@payloadcms/storage-s3\"",
|
||||
"build:storage-uploadthing": "turbo build --filter \"@payloadcms/storage-uploadthing\"",
|
||||
"build:storage-vercel-blob": "turbo build --filter \"@payloadcms/storage-vercel-blob\"",
|
||||
"build:tests": "pnpm --filter payload-test-suite run typecheck",
|
||||
"build:translations": "turbo build --filter translations",
|
||||
"build:ui": "turbo build --filter ui",
|
||||
"build:translations": "turbo build --filter \"@payloadcms/translations\"",
|
||||
"build:ui": "turbo build --filter \"@payloadcms/ui\"",
|
||||
"clean": "turbo clean",
|
||||
"clean:all": "node ./scripts/delete-recursively.js '@node_modules' 'media/*' '**/dist/' '**/.cache/*' '**/.next/*' '**/.turbo/*' '**/tsconfig.tsbuildinfo' '**/payload*.tgz' '**/meta_*.json'",
|
||||
"clean:build": "node ./scripts/delete-recursively.js 'media/' '**/dist/' '**/.cache/' '**/.next/' '**/.turbo/' '**/tsconfig.tsbuildinfo' '**/payload*.tgz' '**/meta_*.json'",
|
||||
"clean:cache": "node ./scripts/delete-recursively.js node_modules/.cache! packages/payload/node_modules/.cache! .next/*",
|
||||
"dev": "tsx ./test/dev.ts",
|
||||
"dev": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts",
|
||||
"dev:generate-graphql-schema": "pnpm runts ./test/generateGraphQLSchema.ts",
|
||||
"dev:generate-importmap": "pnpm runts ./test/generateImportMap.ts",
|
||||
"dev:generate-types": "pnpm runts ./test/generateTypes.ts",
|
||||
"dev:postgres": "cross-env PAYLOAD_DATABASE=postgres pnpm runts ./test/dev.ts",
|
||||
"dev:vercel-postgres": "cross-env PAYLOAD_DATABASE=vercel-postgres pnpm runts ./test/dev.ts",
|
||||
"devsafe": "node ./scripts/delete-recursively.js '**/.next' && pnpm dev",
|
||||
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
|
||||
"docker:start": "docker compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",
|
||||
"docker:stop": "docker compose -f packages/plugin-cloud-storage/docker-compose.yml down",
|
||||
"fix": "eslint \"packages/**/*.ts\" --fix",
|
||||
"force:build": "pnpm run build:core:force",
|
||||
"lint": "eslint \"packages/**/*.ts\"",
|
||||
"lint": "turbo run lint --concurrency 1 --continue",
|
||||
"lint-staged": "lint-staged",
|
||||
"lint:fix": "turbo run lint:fix --concurrency 1 --continue",
|
||||
"obliterate-playwright-cache-macos": "rm -rf ~/Library/Caches/ms-playwright && find /System/Volumes/Data/private/var/folders -type d -name 'playwright*' -exec rm -rf {} +",
|
||||
"prepare": "husky",
|
||||
"reinstall": "pnpm clean:all && pnpm install",
|
||||
"release:alpha": "pnpm runts ./scripts/release.ts --bump prerelease --tag alpha",
|
||||
"release:beta": "pnpm runts ./scripts/release.ts --bump prerelease --tag beta",
|
||||
"runts": "node --no-deprecation --import @swc-node/register/esm-register",
|
||||
"runts": "cross-env NODE_OPTIONS=--no-deprecation node --no-deprecation --import @swc-node/register/esm-register",
|
||||
"script:gen-templates": "pnpm runts ./scripts/generate-template-variations.ts",
|
||||
"script:list-published": "pnpm runts scripts/lib/getPackageRegistryVersions.ts",
|
||||
"script:pack": "pnpm runts scripts/pack-all-to-dest.ts",
|
||||
@@ -157,13 +159,14 @@
|
||||
"swc-plugin-transform-remove-imports": "1.15.0",
|
||||
"tempy": "1.0.1",
|
||||
"tsx": "4.17.0",
|
||||
"turbo": "^1.13.3",
|
||||
"turbo": "^2.1.0",
|
||||
"typescript": "5.5.4"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^19.0.0 || ^19.0.0-rc-06d0b89e-20240801",
|
||||
"react-dom": "^19.0.0 || ^19.0.0-rc-06d0b89e-20240801"
|
||||
},
|
||||
"packageManager": "pnpm@9.7.1",
|
||||
"engines": {
|
||||
"node": "^18.20.2 || >=20.9.0",
|
||||
"pnpm": "^9.7.0"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "create-payload-app",
|
||||
"version": "3.0.0-beta.88",
|
||||
"version": "3.0.0-beta.96",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -42,6 +42,8 @@
|
||||
"build": "pnpm pack-template-files && pnpm typecheck && pnpm build:swc",
|
||||
"build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
|
||||
"clean": "rimraf {dist,*.tsbuildinfo}",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"pack-template-files": "node --no-deprecation --import @swc-node/register/esm-register src/scripts/pack-template-files.ts",
|
||||
"prepublishOnly": "pnpm clean && pnpm build",
|
||||
"test": "jest",
|
||||
|
||||
@@ -14,6 +14,7 @@ import { getValidTemplates } from './templates.js'
|
||||
describe('createProject', () => {
|
||||
let projectDir: string
|
||||
beforeAll(() => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log = jest.fn()
|
||||
})
|
||||
|
||||
@@ -57,7 +58,7 @@ describe('createProject', () => {
|
||||
const packageJson = fse.readJsonSync(packageJsonPath)
|
||||
|
||||
// Check package name and description
|
||||
expect(packageJson.name).toEqual(projectName)
|
||||
expect(packageJson.name).toStrictEqual(projectName)
|
||||
})
|
||||
|
||||
describe('creates project from template', () => {
|
||||
@@ -115,10 +116,6 @@ describe('createProject', () => {
|
||||
})
|
||||
)?.[0]
|
||||
|
||||
if (!payloadConfigPath) {
|
||||
throw new Error(`Could not find payload.config.ts inside ${projectDir}`)
|
||||
}
|
||||
|
||||
const content = fse.readFileSync(payloadConfigPath, 'utf-8')
|
||||
|
||||
// Check payload.config.ts
|
||||
|
||||
@@ -2,7 +2,6 @@ import type { CompilerOptions } from 'typescript'
|
||||
|
||||
import * as p from '@clack/prompts'
|
||||
import { parse, stringify } from 'comment-json'
|
||||
import execa from 'execa'
|
||||
import fs from 'fs'
|
||||
import fse from 'fs-extra'
|
||||
import globby from 'globby'
|
||||
@@ -53,8 +52,7 @@ export async function initNext(args: InitNextArgs): Promise<InitNextResult> {
|
||||
nextAppDetails.nextAppDir = createdAppDir
|
||||
}
|
||||
|
||||
const { hasTopLevelLayout, isPayloadInstalled, isSrcDir, nextAppDir, nextConfigType } =
|
||||
nextAppDetails
|
||||
const { hasTopLevelLayout, isSrcDir, nextAppDir, nextConfigType } = nextAppDetails
|
||||
|
||||
if (!nextConfigType) {
|
||||
return {
|
||||
@@ -169,7 +167,9 @@ async function installAndConfigurePayload(
|
||||
}
|
||||
|
||||
const logDebug = (message: string) => {
|
||||
if (debug) origDebug(message)
|
||||
if (debug) {
|
||||
origDebug(message)
|
||||
}
|
||||
}
|
||||
|
||||
if (!fs.existsSync(projectDir)) {
|
||||
@@ -210,7 +210,7 @@ async function installAndConfigurePayload(
|
||||
)
|
||||
|
||||
// This is a little clunky and needs to account for isSrcDir
|
||||
copyRecursiveSync(templateSrcDir, path.dirname(nextConfigPath), debug)
|
||||
copyRecursiveSync(templateSrcDir, path.dirname(nextConfigPath))
|
||||
|
||||
// Wrap next.config.js with withPayload
|
||||
await wrapNextConfig({ nextConfigPath, nextConfigType })
|
||||
|
||||
@@ -4,13 +4,19 @@ import slugify from '@sindresorhus/slugify'
|
||||
import type { CliArgs } from '../types.js'
|
||||
|
||||
export async function parseProjectName(args: CliArgs): Promise<string> {
|
||||
if (args['--name']) return slugify(args['--name'])
|
||||
if (args._[0]) return slugify(args._[0])
|
||||
if (args['--name']) {
|
||||
return slugify(args['--name'])
|
||||
}
|
||||
if (args._[0]) {
|
||||
return slugify(args._[0])
|
||||
}
|
||||
|
||||
const projectName = await p.text({
|
||||
message: 'Project name?',
|
||||
validate: (value) => {
|
||||
if (!value) return 'Please enter a project name.'
|
||||
if (!value) {
|
||||
return 'Please enter a project name.'
|
||||
}
|
||||
},
|
||||
})
|
||||
if (p.isCancel(projectName)) {
|
||||
|
||||
@@ -9,7 +9,9 @@ export async function parseTemplate(
|
||||
if (args['--template']) {
|
||||
const templateName = args['--template']
|
||||
const template = validTemplates.find((t) => t.name === templateName)
|
||||
if (!template) throw new Error('Invalid template given')
|
||||
if (!template) {
|
||||
throw new Error('Invalid template given')
|
||||
}
|
||||
return template
|
||||
}
|
||||
|
||||
|
||||
@@ -29,6 +29,18 @@ const postgresReplacement: DbAdapterReplacement = {
|
||||
packageName: '@payloadcms/db-postgres',
|
||||
}
|
||||
|
||||
const vercelPostgresReplacement: DbAdapterReplacement = {
|
||||
configReplacement: (envName = 'POSTGRES_URL') => [
|
||||
' db: vercelPostgresAdapter({',
|
||||
' pool: {',
|
||||
` connectionString: process.env.${envName} || '',`,
|
||||
' },',
|
||||
' }),',
|
||||
],
|
||||
importReplacement: "import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'",
|
||||
packageName: '@payloadcms/db-vercel-postgres',
|
||||
}
|
||||
|
||||
const sqliteReplacement: DbAdapterReplacement = {
|
||||
configReplacement: (envName = 'DATABASE_URI') => [
|
||||
' db: sqliteAdapter({',
|
||||
@@ -45,6 +57,7 @@ export const dbReplacements: Record<DbType, DbAdapterReplacement> = {
|
||||
mongodb: mongodbReplacement,
|
||||
postgres: postgresReplacement,
|
||||
sqlite: sqliteReplacement,
|
||||
vercelPostgres: vercelPostgresReplacement,
|
||||
}
|
||||
|
||||
type StorageAdapterReplacement = {
|
||||
|
||||
@@ -27,6 +27,11 @@ const dbChoiceRecord: Record<DbType, DbChoice> = {
|
||||
title: 'SQLite (beta)',
|
||||
value: 'sqlite',
|
||||
},
|
||||
vercelPostgres: {
|
||||
dbConnectionPrefix: 'postgres://postgres:<password>@127.0.0.1:5432/',
|
||||
title: 'Vercel Postgres (beta)',
|
||||
value: 'vercelPostgres',
|
||||
},
|
||||
}
|
||||
|
||||
export async function selectDb(args: CliArgs, projectName: string): Promise<DbDetails> {
|
||||
@@ -49,7 +54,9 @@ export async function selectDb(args: CliArgs, projectName: string): Promise<DbDe
|
||||
value: dbChoice.value,
|
||||
})),
|
||||
})
|
||||
if (p.isCancel(dbType)) process.exit(0)
|
||||
if (p.isCancel(dbType)) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
const dbChoice = dbChoiceRecord[dbType]
|
||||
@@ -68,7 +75,9 @@ export async function selectDb(args: CliArgs, projectName: string): Promise<DbDe
|
||||
initialValue: initialDbUri,
|
||||
message: `Enter ${dbChoice.title.split(' ')[0]} connection string`, // strip beta from title
|
||||
})
|
||||
if (p.isCancel(dbUri)) process.exit(0)
|
||||
if (p.isCancel(dbUri)) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -16,7 +16,9 @@ import { installPackages } from './install-packages.js'
|
||||
export async function updatePayloadInProject(
|
||||
appDetails: NextAppDetails,
|
||||
): Promise<{ message: string; success: boolean }> {
|
||||
if (!appDetails.nextConfigPath) return { message: 'No Next.js config found', success: false }
|
||||
if (!appDetails.nextConfigPath) {
|
||||
return { message: 'No Next.js config found', success: false }
|
||||
}
|
||||
|
||||
const projectDir = path.dirname(appDetails.nextConfigPath)
|
||||
|
||||
|
||||
@@ -42,8 +42,6 @@ export async function parseAndModifyConfigContent(
|
||||
): Promise<{ modifiedConfigContent: string; success: boolean }> {
|
||||
content = withPayloadStatement[configType] + '\n' + content
|
||||
|
||||
console.log({ configType, content })
|
||||
|
||||
if (configType === 'cjs' || configType === 'esm') {
|
||||
try {
|
||||
const ast = parseModule(content, { loc: true })
|
||||
|
||||
@@ -36,13 +36,20 @@ export async function writeEnvFile(args: {
|
||||
.split('\n')
|
||||
.filter((e) => e)
|
||||
.map((line) => {
|
||||
if (line.startsWith('#') || !line.includes('=')) return line
|
||||
if (line.startsWith('#') || !line.includes('=')) {
|
||||
return line
|
||||
}
|
||||
|
||||
const split = line.split('=')
|
||||
const key = split[0]
|
||||
let value = split[1]
|
||||
|
||||
if (key === 'MONGODB_URI' || key === 'MONGO_URL' || key === 'DATABASE_URI') {
|
||||
if (
|
||||
key === 'MONGODB_URI' ||
|
||||
key === 'MONGO_URL' ||
|
||||
key === 'DATABASE_URI' ||
|
||||
key === 'POSTGRES_URL'
|
||||
) {
|
||||
value = databaseUri
|
||||
}
|
||||
if (key === 'PAYLOAD_SECRET' || key === 'PAYLOAD_SECRET_KEY') {
|
||||
|
||||
@@ -57,7 +57,7 @@ interface Template {
|
||||
|
||||
export type PackageManager = 'bun' | 'npm' | 'pnpm' | 'yarn'
|
||||
|
||||
export type DbType = 'mongodb' | 'postgres' | 'sqlite'
|
||||
export type DbType = 'mongodb' | 'postgres' | 'sqlite' | 'vercelPostgres'
|
||||
|
||||
export type DbDetails = {
|
||||
dbUri: string
|
||||
|
||||
@@ -6,7 +6,7 @@ import path from 'path'
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
export function copyRecursiveSync(src: string, dest: string, debug?: boolean) {
|
||||
export function copyRecursiveSync(src: string, dest: string) {
|
||||
const exists = fs.existsSync(src)
|
||||
const stats = exists && fs.statSync(src)
|
||||
const isDirectory = exists && stats !== false && stats.isDirectory()
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-mongodb",
|
||||
"version": "3.0.0-beta.88",
|
||||
"version": "3.0.0-beta.96",
|
||||
"description": "The officially supported MongoDB database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -30,6 +30,8 @@
|
||||
"build:swc": "swc ./src -d ./dist --config-file .swcrc-build --strip-leading-paths",
|
||||
"build:types": "tsc --emitDeclarationOnly --outDir dist",
|
||||
"clean": "rimraf {dist,*.tsbuildinfo}",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"prepublishOnly": "pnpm clean && pnpm turbo build"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
@@ -59,8 +59,10 @@ export const connect: Connect = async function connect(
|
||||
await this.migrate({ migrations: this.prodMigrations })
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
this.payload.logger.error(`Error: cannot connect to MongoDB. Details: ${err.message}`, err)
|
||||
this.payload.logger.error({
|
||||
err,
|
||||
msg: `Error: cannot connect to MongoDB. Details: ${err.message}`,
|
||||
})
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import type { CreateGlobal, PayloadRequest } from 'payload'
|
||||
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const createGlobal: CreateGlobal = async function createGlobal(
|
||||
|
||||
@@ -2,7 +2,7 @@ import type { DeleteOne, Document, PayloadRequest } from 'payload'
|
||||
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const deleteOne: DeleteOne = async function deleteOne(
|
||||
|
||||
@@ -6,7 +6,7 @@ import { flattenWhereToOperators } from 'payload'
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import { buildSortParam } from './queries/buildSortParam.js'
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const find: Find = async function find(
|
||||
|
||||
@@ -4,7 +4,7 @@ import { combineQueries } from 'payload'
|
||||
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const findGlobal: FindGlobal = async function findGlobal(
|
||||
|
||||
@@ -6,7 +6,7 @@ import { buildVersionGlobalFields, flattenWhereToOperators } from 'payload'
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import { buildSortParam } from './queries/buildSortParam.js'
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const findGlobalVersions: FindGlobalVersions = async function findGlobalVersions(
|
||||
|
||||
@@ -3,7 +3,7 @@ import type { Document, FindOne, PayloadRequest } from 'payload'
|
||||
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const findOne: FindOne = async function findOne(
|
||||
|
||||
@@ -6,7 +6,7 @@ import { flattenWhereToOperators } from 'payload'
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import { buildSortParam } from './queries/buildSortParam.js'
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const findVersions: FindVersions = async function findVersions(
|
||||
|
||||
@@ -182,6 +182,7 @@ export function mongooseAdapter({
|
||||
init,
|
||||
migrateFresh,
|
||||
migrationDir,
|
||||
packageName: '@payloadcms/db-mongodb',
|
||||
payload,
|
||||
prodMigrations,
|
||||
queryDrafts,
|
||||
|
||||
@@ -8,10 +8,10 @@ import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
import type { CollectionModel } from './types.js'
|
||||
|
||||
import buildCollectionSchema from './models/buildCollectionSchema.js'
|
||||
import { buildCollectionSchema } from './models/buildCollectionSchema.js'
|
||||
import { buildGlobalModel } from './models/buildGlobalModel.js'
|
||||
import buildSchema from './models/buildSchema.js'
|
||||
import getBuildQueryPlugin from './queries/buildQuery.js'
|
||||
import { buildSchema } from './models/buildSchema.js'
|
||||
import { getBuildQueryPlugin } from './queries/buildQuery.js'
|
||||
import { getDBName } from './utilities/getDBName.js'
|
||||
|
||||
export const init: Init = function init(this: MongooseAdapter) {
|
||||
|
||||
@@ -3,10 +3,10 @@ import type { SanitizedCollectionConfig, SanitizedConfig } from 'payload'
|
||||
|
||||
import paginate from 'mongoose-paginate-v2'
|
||||
|
||||
import getBuildQueryPlugin from '../queries/buildQuery.js'
|
||||
import buildSchema from './buildSchema.js'
|
||||
import { getBuildQueryPlugin } from '../queries/buildQuery.js'
|
||||
import { buildSchema } from './buildSchema.js'
|
||||
|
||||
const buildCollectionSchema = (
|
||||
export const buildCollectionSchema = (
|
||||
collection: SanitizedCollectionConfig,
|
||||
config: SanitizedConfig,
|
||||
schemaOptions = {},
|
||||
@@ -44,5 +44,3 @@ const buildCollectionSchema = (
|
||||
|
||||
return schema
|
||||
}
|
||||
|
||||
export default buildCollectionSchema
|
||||
|
||||
@@ -4,8 +4,8 @@ import mongoose from 'mongoose'
|
||||
|
||||
import type { GlobalModel } from '../types.js'
|
||||
|
||||
import getBuildQueryPlugin from '../queries/buildQuery.js'
|
||||
import buildSchema from './buildSchema.js'
|
||||
import { getBuildQueryPlugin } from '../queries/buildQuery.js'
|
||||
import { buildSchema } from './buildSchema.js'
|
||||
|
||||
export const buildGlobalModel = (config: SanitizedConfig): GlobalModel | null => {
|
||||
if (config.globals && config.globals.length > 0) {
|
||||
|
||||
@@ -111,7 +111,7 @@ const localizeSchema = (
|
||||
return schema
|
||||
}
|
||||
|
||||
const buildSchema = (
|
||||
export const buildSchema = (
|
||||
config: SanitizedConfig,
|
||||
configFields: Field[],
|
||||
buildSchemaOptions: BuildSchemaOptions = {},
|
||||
@@ -595,16 +595,77 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
const hasManyRelations = Array.isArray(field.relationTo)
|
||||
let schemaToReturn: { [key: string]: any } = {}
|
||||
|
||||
if (field.localized && config.localization) {
|
||||
schemaToReturn = {
|
||||
type: config.localization.localeCodes.reduce((locales, locale) => {
|
||||
let localeSchema: { [key: string]: any } = {}
|
||||
|
||||
if (hasManyRelations) {
|
||||
localeSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
_id: false,
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
relationTo: { type: String, enum: field.relationTo },
|
||||
value: {
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
refPath: `${field.name}.${locale}.relationTo`,
|
||||
},
|
||||
}
|
||||
} else {
|
||||
localeSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...locales,
|
||||
[locale]: field.hasMany
|
||||
? { type: [localeSchema], default: formatDefaultValue(field) }
|
||||
: localeSchema,
|
||||
}
|
||||
}, {}),
|
||||
localized: true,
|
||||
}
|
||||
} else if (hasManyRelations) {
|
||||
schemaToReturn = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
_id: false,
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
relationTo: { type: String, enum: field.relationTo },
|
||||
value: {
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
refPath: `${field.name}.relationTo`,
|
||||
},
|
||||
}
|
||||
|
||||
if (field.hasMany) {
|
||||
schemaToReturn = {
|
||||
type: [schemaToReturn],
|
||||
default: formatDefaultValue(field),
|
||||
}
|
||||
}
|
||||
} else {
|
||||
schemaToReturn = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
}
|
||||
|
||||
if (field.hasMany) {
|
||||
schemaToReturn = {
|
||||
type: [schemaToReturn],
|
||||
default: formatDefaultValue(field),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
[field.name]: schemaToReturn,
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
export default buildSchema
|
||||
|
||||
@@ -18,7 +18,10 @@ export type BuildQueryArgs = {
|
||||
|
||||
// This plugin asynchronously builds a list of Mongoose query constraints
|
||||
// which can then be used in subsequent Mongoose queries.
|
||||
const getBuildQueryPlugin = ({ collectionSlug, versionsFields }: GetBuildQueryPluginArgs = {}) => {
|
||||
export const getBuildQueryPlugin = ({
|
||||
collectionSlug,
|
||||
versionsFields,
|
||||
}: GetBuildQueryPluginArgs = {}) => {
|
||||
return function buildQueryPlugin(schema) {
|
||||
const modifiedSchema = schema
|
||||
async function buildQuery({
|
||||
@@ -57,5 +60,3 @@ const getBuildQueryPlugin = ({ collectionSlug, versionsFields }: GetBuildQueryPl
|
||||
modifiedSchema.statics.buildQuery = buildQuery
|
||||
}
|
||||
}
|
||||
|
||||
export default getBuildQueryPlugin
|
||||
|
||||
@@ -16,7 +16,7 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
} as Config)
|
||||
})
|
||||
it('passes through a non-localized sort property', async () => {
|
||||
it('passes through a non-localized sort property', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
config,
|
||||
fields: [
|
||||
|
||||
@@ -6,7 +6,7 @@ import { combineQueries, flattenWhereToOperators } from 'payload'
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import { buildSortParam } from './queries/buildSortParam.js'
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const queryDrafts: QueryDrafts = async function queryDrafts(
|
||||
|
||||
@@ -6,6 +6,8 @@ import { v4 as uuid } from 'uuid'
|
||||
|
||||
import type { MongooseAdapter } from '../index.js'
|
||||
|
||||
// Needs await to fulfill the interface
|
||||
// eslint-disable-next-line @typescript-eslint/require-await
|
||||
export const beginTransaction: BeginTransaction = async function beginTransaction(
|
||||
this: MongooseAdapter,
|
||||
options: TransactionOptions,
|
||||
|
||||
@@ -2,7 +2,7 @@ import type { PayloadRequest, UpdateGlobal } from 'payload'
|
||||
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const updateGlobal: UpdateGlobal = async function updateGlobal(
|
||||
|
||||
@@ -3,7 +3,7 @@ import type { PayloadRequest, UpdateOne } from 'payload'
|
||||
import type { MongooseAdapter } from './index.js'
|
||||
|
||||
import { handleError } from './utilities/handleError.js'
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields.js'
|
||||
import { sanitizeInternalFields } from './utilities/sanitizeInternalFields.js'
|
||||
import { withSession } from './withSession.js'
|
||||
|
||||
export const updateOne: UpdateOne = async function updateOne(
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const internalFields = ['__v']
|
||||
|
||||
const sanitizeInternalFields = <T extends Record<string, unknown>>(incomingDoc: T): T =>
|
||||
export const sanitizeInternalFields = <T extends Record<string, unknown>>(incomingDoc: T): T =>
|
||||
Object.entries(incomingDoc).reduce((newDoc, [key, val]): T => {
|
||||
if (key === '_id') {
|
||||
return {
|
||||
@@ -18,5 +18,3 @@ const sanitizeInternalFields = <T extends Record<string, unknown>>(incomingDoc:
|
||||
[key]: val,
|
||||
}
|
||||
}, {} as T)
|
||||
|
||||
export default sanitizeInternalFields
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "3.0.0-beta.88",
|
||||
"version": "3.0.0-beta.96",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -40,6 +40,8 @@
|
||||
"build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
|
||||
"build:types": "tsc --emitDeclarationOnly --outDir dist",
|
||||
"clean": "rimraf {dist,*.tsbuildinfo}",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"prepack": "pnpm clean && pnpm turbo build",
|
||||
"prepublishOnly": "pnpm clean && pnpm turbo build",
|
||||
"renamePredefinedMigrations": "node --no-deprecation --import @swc-node/register/esm-register ./scripts/renamePredefinedMigrations.ts"
|
||||
@@ -59,7 +61,7 @@
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/pg": "8.10.2",
|
||||
"@types/to-snake-case": "1.0.0",
|
||||
"esbuild": "0.23.0",
|
||||
"esbuild": "0.23.1",
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
@@ -139,6 +139,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
|
||||
migrateReset,
|
||||
migrateStatus,
|
||||
migrationDir,
|
||||
packageName: '@payloadcms/db-postgres',
|
||||
payload,
|
||||
queryDrafts,
|
||||
rejectInitializing,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-sqlite",
|
||||
"version": "3.0.0-beta.88",
|
||||
"version": "3.0.0-beta.96",
|
||||
"description": "The officially supported SQLite database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -39,6 +39,8 @@
|
||||
"build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
|
||||
"build:types": "tsc --emitDeclarationOnly --outDir dist",
|
||||
"clean": "rimraf {dist,*.tsbuildinfo}",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"prepack": "pnpm clean && pnpm turbo build",
|
||||
"prepublishOnly": "pnpm clean && pnpm turbo build"
|
||||
},
|
||||
|
||||
@@ -140,6 +140,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
|
||||
migrateReset,
|
||||
migrateStatus,
|
||||
migrationDir,
|
||||
packageName: '@payloadcms/db-sqlite',
|
||||
payload,
|
||||
queryDrafts,
|
||||
rejectInitializing,
|
||||
|
||||
@@ -58,9 +58,17 @@ type Args = {
|
||||
tableName: string
|
||||
timestamps?: boolean
|
||||
versions: boolean
|
||||
/**
|
||||
* Tracks whether or not this table is built
|
||||
* from the result of a localized array or block field at some point
|
||||
*/
|
||||
withinLocalizedArrayOrBlock?: boolean
|
||||
}
|
||||
|
||||
type Result = {
|
||||
hasLocalizedManyNumberField: boolean
|
||||
hasLocalizedManyTextField: boolean
|
||||
hasLocalizedRelationshipField: boolean
|
||||
hasManyNumberField: 'index' | boolean
|
||||
hasManyTextField: 'index' | boolean
|
||||
relationsToBuild: RelationMap
|
||||
@@ -81,6 +89,7 @@ export const buildTable = ({
|
||||
tableName,
|
||||
timestamps,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock,
|
||||
}: Args): Result => {
|
||||
const isRoot = !incomingRootTableName
|
||||
const rootTableName = incomingRootTableName || tableName
|
||||
@@ -128,6 +137,7 @@ export const buildTable = ({
|
||||
rootTableIDColType: rootTableIDColType || idColType,
|
||||
rootTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock,
|
||||
})
|
||||
|
||||
// split the relationsToBuild by localized and non-localized
|
||||
@@ -478,5 +488,12 @@ export const buildTable = ({
|
||||
return result
|
||||
})
|
||||
|
||||
return { hasManyNumberField, hasManyTextField, relationsToBuild }
|
||||
return {
|
||||
hasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField,
|
||||
hasManyNumberField,
|
||||
hasManyTextField,
|
||||
relationsToBuild,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,6 +52,11 @@ type Args = {
|
||||
rootTableIDColType: IDType
|
||||
rootTableName: string
|
||||
versions: boolean
|
||||
/**
|
||||
* Tracks whether or not this table is built
|
||||
* from the result of a localized array or block field at some point
|
||||
*/
|
||||
withinLocalizedArrayOrBlock?: boolean
|
||||
}
|
||||
|
||||
type Result = {
|
||||
@@ -84,6 +89,7 @@ export const traverseFields = ({
|
||||
rootTableIDColType,
|
||||
rootTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock,
|
||||
}: Args): Result => {
|
||||
let hasLocalizedField = false
|
||||
let hasLocalizedRelationshipField = false
|
||||
@@ -150,7 +156,11 @@ export const traverseFields = ({
|
||||
switch (field.type) {
|
||||
case 'text': {
|
||||
if (field.hasMany) {
|
||||
if (field.localized) {
|
||||
const isLocalized =
|
||||
Boolean(field.localized && adapter.payload.config.localization) ||
|
||||
withinLocalizedArrayOrBlock
|
||||
|
||||
if (isLocalized) {
|
||||
hasLocalizedManyTextField = true
|
||||
}
|
||||
|
||||
@@ -179,7 +189,11 @@ export const traverseFields = ({
|
||||
|
||||
case 'number': {
|
||||
if (field.hasMany) {
|
||||
if (field.localized) {
|
||||
const isLocalized =
|
||||
Boolean(field.localized && adapter.payload.config.localization) ||
|
||||
withinLocalizedArrayOrBlock
|
||||
|
||||
if (isLocalized) {
|
||||
hasLocalizedManyNumberField = true
|
||||
}
|
||||
|
||||
@@ -255,7 +269,11 @@ export const traverseFields = ({
|
||||
parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent),
|
||||
}
|
||||
|
||||
if (field.localized) {
|
||||
const isLocalized =
|
||||
Boolean(field.localized && adapter.payload.config.localization) ||
|
||||
withinLocalizedArrayOrBlock
|
||||
|
||||
if (isLocalized) {
|
||||
baseColumns.locale = text('locale', { enum: locales }).notNull()
|
||||
baseExtraConfig.localeIdx = (cols) =>
|
||||
index(`${selectTableName}_locale_idx`).on(cols.locale)
|
||||
@@ -337,13 +355,20 @@ export const traverseFields = ({
|
||||
_parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID),
|
||||
}
|
||||
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
const isLocalized =
|
||||
Boolean(field.localized && adapter.payload.config.localization) ||
|
||||
withinLocalizedArrayOrBlock
|
||||
|
||||
if (isLocalized) {
|
||||
baseColumns._locale = text('_locale', { enum: locales }).notNull()
|
||||
baseExtraConfig._localeIdx = (cols) =>
|
||||
index(`${arrayTableName}_locale_idx`).on(cols._locale)
|
||||
}
|
||||
|
||||
const {
|
||||
hasLocalizedManyNumberField: subHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: subHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: subHasLocalizedRelationshipField,
|
||||
hasManyNumberField: subHasManyNumberField,
|
||||
hasManyTextField: subHasManyTextField,
|
||||
relationsToBuild: subRelationsToBuild,
|
||||
@@ -360,8 +385,21 @@ export const traverseFields = ({
|
||||
rootTableName,
|
||||
tableName: arrayTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock: isLocalized,
|
||||
})
|
||||
|
||||
if (subHasLocalizedManyNumberField) {
|
||||
hasLocalizedManyNumberField = subHasLocalizedManyNumberField
|
||||
}
|
||||
|
||||
if (subHasLocalizedRelationshipField) {
|
||||
hasLocalizedRelationshipField = subHasLocalizedRelationshipField
|
||||
}
|
||||
|
||||
if (subHasLocalizedManyTextField) {
|
||||
hasLocalizedManyTextField = subHasLocalizedManyTextField
|
||||
}
|
||||
|
||||
if (subHasManyTextField) {
|
||||
if (!hasManyTextField || subHasManyTextField === 'index')
|
||||
hasManyTextField = subHasManyTextField
|
||||
@@ -453,13 +491,20 @@ export const traverseFields = ({
|
||||
_pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path),
|
||||
}
|
||||
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
const isLocalized =
|
||||
Boolean(field.localized && adapter.payload.config.localization) ||
|
||||
withinLocalizedArrayOrBlock
|
||||
|
||||
if (isLocalized) {
|
||||
baseColumns._locale = text('_locale', { enum: locales }).notNull()
|
||||
baseExtraConfig._localeIdx = (cols) =>
|
||||
index(`${blockTableName}_locale_idx`).on(cols._locale)
|
||||
}
|
||||
|
||||
const {
|
||||
hasLocalizedManyNumberField: subHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: subHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: subHasLocalizedRelationshipField,
|
||||
hasManyNumberField: subHasManyNumberField,
|
||||
hasManyTextField: subHasManyTextField,
|
||||
relationsToBuild: subRelationsToBuild,
|
||||
@@ -476,8 +521,21 @@ export const traverseFields = ({
|
||||
rootTableName,
|
||||
tableName: blockTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock: isLocalized,
|
||||
})
|
||||
|
||||
if (subHasLocalizedManyNumberField) {
|
||||
hasLocalizedManyNumberField = subHasLocalizedManyNumberField
|
||||
}
|
||||
|
||||
if (subHasLocalizedRelationshipField) {
|
||||
hasLocalizedRelationshipField = subHasLocalizedRelationshipField
|
||||
}
|
||||
|
||||
if (subHasLocalizedManyTextField) {
|
||||
hasLocalizedManyTextField = subHasLocalizedManyTextField
|
||||
}
|
||||
|
||||
if (subHasManyTextField) {
|
||||
if (!hasManyTextField || subHasManyTextField === 'index')
|
||||
hasManyTextField = subHasManyTextField
|
||||
@@ -577,6 +635,7 @@ export const traverseFields = ({
|
||||
rootTableIDColType,
|
||||
rootTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock,
|
||||
})
|
||||
|
||||
if (groupHasLocalizedField) hasLocalizedField = true
|
||||
@@ -618,6 +677,7 @@ export const traverseFields = ({
|
||||
rootTableIDColType,
|
||||
rootTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock,
|
||||
})
|
||||
|
||||
if (groupHasLocalizedField) hasLocalizedField = true
|
||||
@@ -660,6 +720,7 @@ export const traverseFields = ({
|
||||
rootTableIDColType,
|
||||
rootTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock,
|
||||
})
|
||||
|
||||
if (tabHasLocalizedField) hasLocalizedField = true
|
||||
@@ -702,6 +763,7 @@ export const traverseFields = ({
|
||||
rootTableIDColType,
|
||||
rootTableName,
|
||||
versions,
|
||||
withinLocalizedArrayOrBlock,
|
||||
})
|
||||
|
||||
if (rowHasLocalizedField) hasLocalizedField = true
|
||||
@@ -717,7 +779,7 @@ export const traverseFields = ({
|
||||
case 'upload':
|
||||
if (Array.isArray(field.relationTo)) {
|
||||
field.relationTo.forEach((relation) => relationships.add(relation))
|
||||
} else if (field.type === 'relationship' && field.hasMany) {
|
||||
} else if (field.hasMany) {
|
||||
relationships.add(field.relationTo)
|
||||
} else {
|
||||
// simple relationships get a column on the targetTable with a foreign key to the relationTo table
|
||||
@@ -753,7 +815,10 @@ export const traverseFields = ({
|
||||
}
|
||||
break
|
||||
}
|
||||
if (adapter.payload.config.localization && field.localized) {
|
||||
if (
|
||||
Boolean(field.localized && adapter.payload.config.localization) ||
|
||||
withinLocalizedArrayOrBlock
|
||||
) {
|
||||
hasLocalizedRelationshipField = true
|
||||
}
|
||||
|
||||
|
||||
1
packages/db-vercel-postgres/.gitignore
vendored
Normal file
1
packages/db-vercel-postgres/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/migrations
|
||||
15
packages/db-vercel-postgres/.swcrc
Normal file
15
packages/db-vercel-postgres/.swcrc
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/swcrc",
|
||||
"sourceMaps": true,
|
||||
"jsc": {
|
||||
"target": "esnext",
|
||||
"parser": {
|
||||
"syntax": "typescript",
|
||||
"tsx": true,
|
||||
"dts": true
|
||||
}
|
||||
},
|
||||
"module": {
|
||||
"type": "es6"
|
||||
}
|
||||
}
|
||||
43
packages/db-vercel-postgres/README.md
Normal file
43
packages/db-vercel-postgres/README.md
Normal file
@@ -0,0 +1,43 @@
|
||||
# Payload Postgres Adapter
|
||||
|
||||
[Vercel Postgres](https://vercel.com/docs/storage/vercel-postgres) adapter for [Payload](https://payloadcms.com).
|
||||
|
||||
- [Main Repository](https://github.com/payloadcms/payload)
|
||||
- [Payload Docs](https://payloadcms.com/docs)
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install @payloadcms/db-vercel-postgres
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Explicit Connection String
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'
|
||||
|
||||
export default buildConfig({
|
||||
db: vercelPostgresAdapter({
|
||||
pool: {
|
||||
connectionString: process.env.DATABASE_URI,
|
||||
},
|
||||
}),
|
||||
// ...rest of config
|
||||
})
|
||||
```
|
||||
|
||||
### Automatic Connection String Detection
|
||||
|
||||
Have Vercel automatically detect from environment variable (typically `process.env.POSTGRES_URL`)
|
||||
|
||||
```ts
|
||||
export default buildConfig({
|
||||
db: postgresAdapter(),
|
||||
// ...rest of config
|
||||
})
|
||||
```
|
||||
|
||||
More detailed usage can be found in the [Payload Docs](https://payloadcms.com/docs/configuration/overview).
|
||||
20
packages/db-vercel-postgres/eslint.config.js
Normal file
20
packages/db-vercel-postgres/eslint.config.js
Normal file
@@ -0,0 +1,20 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.FlatConfig} */
|
||||
let FlatConfig
|
||||
|
||||
/** @type {FlatConfig[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: './tsconfig.json',
|
||||
tsconfigDirName: import.meta.dirname,
|
||||
...rootParserOptions,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
91
packages/db-vercel-postgres/package.json
Normal file
91
packages/db-vercel-postgres/package.json
Normal file
@@ -0,0 +1,91 @@
|
||||
{
|
||||
"name": "@payloadcms/db-vercel-postgres",
|
||||
"version": "3.0.0-beta.96",
|
||||
"description": "Vercel Postgres adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/payloadcms/payload.git",
|
||||
"directory": "packages/db-vercel-postgres"
|
||||
},
|
||||
"license": "MIT",
|
||||
"author": "Payload <dev@payloadcms.com> (https://payloadcms.com)",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./src/index.ts",
|
||||
"types": "./src/index.ts",
|
||||
"default": "./src/index.ts"
|
||||
},
|
||||
"./types": {
|
||||
"import": "./src/types.ts",
|
||||
"types": "./src/types.ts",
|
||||
"default": "./src/types.ts"
|
||||
},
|
||||
"./migration-utils": {
|
||||
"import": "./src/exports/migration-utils.ts",
|
||||
"types": "./src/exports/migration-utils.ts",
|
||||
"default": "./src/exports/migration-utils.ts"
|
||||
}
|
||||
},
|
||||
"main": "./src/index.ts",
|
||||
"types": "./src/types.ts",
|
||||
"files": [
|
||||
"dist",
|
||||
"mock.js"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "rimraf .dist && rimraf tsconfig.tsbuildinfo && pnpm build:types && pnpm build:swc && pnpm build:esbuild && pnpm renamePredefinedMigrations",
|
||||
"build:esbuild": "echo skipping esbuild",
|
||||
"build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
|
||||
"build:types": "tsc --emitDeclarationOnly --outDir dist",
|
||||
"clean": "rimraf {dist,*.tsbuildinfo}",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"prepack": "pnpm clean && pnpm turbo build",
|
||||
"prepublishOnly": "pnpm clean && pnpm turbo build",
|
||||
"renamePredefinedMigrations": "node --no-deprecation --import @swc-node/register/esm-register ./scripts/renamePredefinedMigrations.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@payloadcms/drizzle": "workspace:*",
|
||||
"@vercel/postgres": "^0.9.0",
|
||||
"console-table-printer": "2.11.2",
|
||||
"drizzle-kit": "0.23.2-df9e596",
|
||||
"drizzle-orm": "0.32.1",
|
||||
"prompts": "2.4.2",
|
||||
"to-snake-case": "1.0.0",
|
||||
"uuid": "10.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hyrious/esbuild-plugin-commonjs": "^0.2.4",
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/pg": "8.10.2",
|
||||
"@types/to-snake-case": "1.0.0",
|
||||
"esbuild": "0.23.1",
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"publishConfig": {
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"default": "./dist/index.js"
|
||||
},
|
||||
"./types": {
|
||||
"import": "./dist/types.js",
|
||||
"types": "./dist/types.d.ts",
|
||||
"default": "./dist/types.js"
|
||||
},
|
||||
"./migration-utils": {
|
||||
"import": "./dist/exports/migration-utils.js",
|
||||
"types": "./dist/exports/migration-utils.d.ts",
|
||||
"default": "./dist/exports/migration-utils.js"
|
||||
}
|
||||
},
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts"
|
||||
}
|
||||
}
|
||||
13
packages/db-vercel-postgres/relationships-v2-v3.mjs
Normal file
13
packages/db-vercel-postgres/relationships-v2-v3.mjs
Normal file
@@ -0,0 +1,13 @@
|
||||
const imports = `import { migratePostgresV2toV3 } from '@payloadcms/migratePostgresV2toV3'`
|
||||
const up = ` await migratePostgresV2toV3({
|
||||
// enables logging of changes that will be made to the database
|
||||
debug: false,
|
||||
// skips calls that modify schema or data
|
||||
dryRun: false,
|
||||
payload,
|
||||
req,
|
||||
})
|
||||
`
|
||||
export { imports, up }
|
||||
|
||||
//# sourceMappingURL=relationships-v2-v3.js.map
|
||||
@@ -0,0 +1,19 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
/**
|
||||
* Changes built .js files to .mjs to for ESM imports
|
||||
*/
|
||||
const rename = () => {
|
||||
fs.readdirSync(path.resolve('./dist/predefinedMigrations'))
|
||||
.filter((f) => {
|
||||
return f.endsWith('.js')
|
||||
})
|
||||
.forEach((file) => {
|
||||
const newPath = path.join('./dist/predefinedMigrations', file)
|
||||
fs.renameSync(newPath, newPath.replace('.js', '.mjs'))
|
||||
})
|
||||
console.log('done')
|
||||
}
|
||||
|
||||
rename()
|
||||
61
packages/db-vercel-postgres/src/connect.ts
Normal file
61
packages/db-vercel-postgres/src/connect.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import type { DrizzleAdapter } from '@payloadcms/drizzle/types'
|
||||
import type { Connect } from 'payload'
|
||||
|
||||
import { pushDevSchema } from '@payloadcms/drizzle'
|
||||
import { VercelPool, sql } from '@vercel/postgres'
|
||||
import { drizzle } from 'drizzle-orm/node-postgres'
|
||||
|
||||
import type { VercelPostgresAdapter } from './types.js'
|
||||
|
||||
export const connect: Connect = async function connect(
|
||||
this: VercelPostgresAdapter,
|
||||
options = {
|
||||
hotReload: false,
|
||||
},
|
||||
) {
|
||||
const { hotReload } = options
|
||||
|
||||
this.schema = {
|
||||
pgSchema: this.pgSchema,
|
||||
...this.tables,
|
||||
...this.relations,
|
||||
...this.enums,
|
||||
}
|
||||
|
||||
try {
|
||||
const logger = this.logger || false
|
||||
// Passed the poolOptions if provided,
|
||||
// else have vercel/postgres detect the connection string from the environment
|
||||
this.drizzle = drizzle(this.poolOptions ? new VercelPool(this.poolOptions) : sql, {
|
||||
logger,
|
||||
schema: this.schema,
|
||||
})
|
||||
|
||||
if (!hotReload) {
|
||||
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
|
||||
this.payload.logger.info(`---- DROPPING TABLES SCHEMA(${this.schemaName || 'public'}) ----`)
|
||||
await this.dropDatabase({ adapter: this })
|
||||
this.payload.logger.info('---- DROPPED TABLES ----')
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.payload.logger.error(`Error: cannot connect to Postgres. Details: ${err.message}`, err)
|
||||
if (typeof this.rejectInitializing === 'function') this.rejectInitializing()
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Only push schema if not in production
|
||||
if (
|
||||
process.env.NODE_ENV !== 'production' &&
|
||||
process.env.PAYLOAD_MIGRATING !== 'true' &&
|
||||
this.push !== false
|
||||
) {
|
||||
await pushDevSchema(this as unknown as DrizzleAdapter)
|
||||
}
|
||||
|
||||
if (typeof this.resolveInitializing === 'function') this.resolveInitializing()
|
||||
|
||||
if (process.env.NODE_ENV === 'production' && this.prodMigrations) {
|
||||
await this.migrate({ migrations: this.prodMigrations })
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'
|
||||
163
packages/db-vercel-postgres/src/index.ts
Normal file
163
packages/db-vercel-postgres/src/index.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
import type { DatabaseAdapterObj, Payload } from 'payload'
|
||||
|
||||
import {
|
||||
beginTransaction,
|
||||
commitTransaction,
|
||||
count,
|
||||
create,
|
||||
createGlobal,
|
||||
createGlobalVersion,
|
||||
createVersion,
|
||||
deleteMany,
|
||||
deleteOne,
|
||||
deleteVersions,
|
||||
destroy,
|
||||
find,
|
||||
findGlobal,
|
||||
findGlobalVersions,
|
||||
findMigrationDir,
|
||||
findOne,
|
||||
findVersions,
|
||||
migrate,
|
||||
migrateDown,
|
||||
migrateFresh,
|
||||
migrateRefresh,
|
||||
migrateReset,
|
||||
migrateStatus,
|
||||
operatorMap,
|
||||
queryDrafts,
|
||||
rollbackTransaction,
|
||||
updateGlobal,
|
||||
updateGlobalVersion,
|
||||
updateOne,
|
||||
updateVersion,
|
||||
} from '@payloadcms/drizzle'
|
||||
import {
|
||||
convertPathToJSONTraversal,
|
||||
countDistinct,
|
||||
createJSONQuery,
|
||||
createMigration,
|
||||
defaultDrizzleSnapshot,
|
||||
deleteWhere,
|
||||
dropDatabase,
|
||||
execute,
|
||||
getMigrationTemplate,
|
||||
init,
|
||||
insert,
|
||||
requireDrizzleKit,
|
||||
} from '@payloadcms/drizzle/postgres'
|
||||
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
|
||||
import { createDatabaseAdapter } from 'payload'
|
||||
|
||||
import type { Args, VercelPostgresAdapter } from './types.js'
|
||||
|
||||
import { connect } from './connect.js'
|
||||
|
||||
export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<VercelPostgresAdapter> {
|
||||
const postgresIDType = args.idType || 'serial'
|
||||
const payloadIDType = postgresIDType === 'serial' ? 'number' : 'text'
|
||||
|
||||
function adapter({ payload }: { payload: Payload }) {
|
||||
const migrationDir = findMigrationDir(args.migrationDir)
|
||||
let resolveInitializing
|
||||
let rejectInitializing
|
||||
let adapterSchema: VercelPostgresAdapter['pgSchema']
|
||||
|
||||
const initializing = new Promise<void>((res, rej) => {
|
||||
resolveInitializing = res
|
||||
rejectInitializing = rej
|
||||
})
|
||||
|
||||
if (args.schemaName) {
|
||||
adapterSchema = pgSchema(args.schemaName)
|
||||
} else {
|
||||
adapterSchema = { enum: pgEnum, table: pgTable }
|
||||
}
|
||||
|
||||
return createDatabaseAdapter<VercelPostgresAdapter>({
|
||||
name: 'postgres',
|
||||
defaultDrizzleSnapshot,
|
||||
drizzle: undefined,
|
||||
enums: {},
|
||||
features: {
|
||||
json: true,
|
||||
},
|
||||
fieldConstraints: {},
|
||||
getMigrationTemplate,
|
||||
idType: postgresIDType,
|
||||
initializing,
|
||||
localesSuffix: args.localesSuffix || '_locales',
|
||||
logger: args.logger,
|
||||
operators: operatorMap,
|
||||
pgSchema: adapterSchema,
|
||||
pool: undefined,
|
||||
poolOptions: args.pool,
|
||||
prodMigrations: args.prodMigrations,
|
||||
push: args.push,
|
||||
relations: {},
|
||||
relationshipsSuffix: args.relationshipsSuffix || '_rels',
|
||||
schema: {},
|
||||
schemaName: args.schemaName,
|
||||
sessions: {},
|
||||
tableNameMap: new Map<string, string>(),
|
||||
tables: {},
|
||||
transactionOptions: args.transactionOptions || undefined,
|
||||
versionsSuffix: args.versionsSuffix || '_v',
|
||||
|
||||
// DatabaseAdapter
|
||||
beginTransaction: args.transactionOptions === false ? undefined : beginTransaction,
|
||||
commitTransaction,
|
||||
connect,
|
||||
convertPathToJSONTraversal,
|
||||
count,
|
||||
countDistinct,
|
||||
create,
|
||||
createGlobal,
|
||||
createGlobalVersion,
|
||||
createJSONQuery,
|
||||
createMigration,
|
||||
createVersion,
|
||||
defaultIDType: payloadIDType,
|
||||
deleteMany,
|
||||
deleteOne,
|
||||
deleteVersions,
|
||||
deleteWhere,
|
||||
destroy,
|
||||
dropDatabase,
|
||||
execute,
|
||||
find,
|
||||
findGlobal,
|
||||
findGlobalVersions,
|
||||
findOne,
|
||||
findVersions,
|
||||
init,
|
||||
insert,
|
||||
migrate,
|
||||
migrateDown,
|
||||
migrateFresh,
|
||||
migrateRefresh,
|
||||
migrateReset,
|
||||
migrateStatus,
|
||||
migrationDir,
|
||||
packageName: '@payloadcms/db-vercel-postgres',
|
||||
payload,
|
||||
queryDrafts,
|
||||
rejectInitializing,
|
||||
requireDrizzleKit,
|
||||
resolveInitializing,
|
||||
rollbackTransaction,
|
||||
updateGlobal,
|
||||
updateGlobalVersion,
|
||||
updateOne,
|
||||
updateVersion,
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
defaultIDType: payloadIDType,
|
||||
init: adapter,
|
||||
}
|
||||
}
|
||||
|
||||
export type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/drizzle/postgres'
|
||||
export { sql } from 'drizzle-orm'
|
||||
@@ -0,0 +1,10 @@
|
||||
const imports = `import { migratePostgresV2toV3 } from '@payloadcms/db-postgres/migration-utils'`
|
||||
const upSQL = ` await migratePostgresV2toV3({
|
||||
// enables logging of changes that will be made to the database
|
||||
debug: false,
|
||||
payload,
|
||||
req,
|
||||
})
|
||||
`
|
||||
|
||||
export { imports, upSQL }
|
||||
@@ -0,0 +1,237 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { Field, Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { upsertRow } from '@payloadcms/drizzle'
|
||||
|
||||
import type { VercelPostgresAdapter } from '../../../types.js'
|
||||
import type { DocsToResave } from '../types.js'
|
||||
|
||||
import { traverseFields } from './traverseFields.js'
|
||||
|
||||
type Args = {
|
||||
adapter: VercelPostgresAdapter
|
||||
collectionSlug?: string
|
||||
db: TransactionPg
|
||||
debug: boolean
|
||||
docsToResave: DocsToResave
|
||||
fields: Field[]
|
||||
globalSlug?: string
|
||||
isVersions: boolean
|
||||
payload: Payload
|
||||
req: PayloadRequest
|
||||
tableName: string
|
||||
}
|
||||
|
||||
export const fetchAndResave = async ({
|
||||
adapter,
|
||||
collectionSlug,
|
||||
db,
|
||||
debug,
|
||||
docsToResave,
|
||||
fields,
|
||||
globalSlug,
|
||||
isVersions,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
}: Args) => {
|
||||
for (const [id, rows] of Object.entries(docsToResave)) {
|
||||
if (collectionSlug) {
|
||||
const collectionConfig = payload.collections[collectionSlug].config
|
||||
|
||||
if (collectionConfig) {
|
||||
if (isVersions) {
|
||||
const doc = await payload.findVersionByID({
|
||||
id,
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`The collection "${collectionConfig.slug}" version with ID ${id} will be migrated`,
|
||||
)
|
||||
}
|
||||
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
id: doc.id,
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(
|
||||
`"${collectionConfig.slug}" version with ID ${doc.id} FAILED TO MIGRATE`,
|
||||
)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`"${collectionConfig.slug}" version with ID ${doc.id} migrated successfully!`,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
const doc = await payload.findByID({
|
||||
id,
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`The collection "${collectionConfig.slug}" with ID ${doc.id} will be migrated`,
|
||||
)
|
||||
}
|
||||
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
id: doc.id,
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(
|
||||
`The collection "${collectionConfig.slug}" with ID ${doc.id} has FAILED TO MIGRATE`,
|
||||
)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`The collection "${collectionConfig.slug}" with ID ${doc.id} has migrated successfully!`,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (globalSlug) {
|
||||
const globalConfig = payload.config.globals?.find((global) => global.slug === globalSlug)
|
||||
|
||||
if (globalConfig) {
|
||||
if (isVersions) {
|
||||
const { docs } = await payload.findGlobalVersions({
|
||||
slug: globalSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
limit: 0,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(`${docs.length} global "${globalSlug}" versions will be migrated`)
|
||||
}
|
||||
|
||||
for (const doc of docs) {
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
id: doc.id,
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(`"${globalSlug}" version with ID ${doc.id} FAILED TO MIGRATE`)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`"${globalSlug}" version with ID ${doc.id} migrated successfully!`,
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const doc = await payload.findGlobal({
|
||||
slug: globalSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(`The global "${globalSlug}" has FAILED TO MIGRATE`)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(`The global "${globalSlug}" has migrated successfully!`)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,215 @@
|
||||
import type { Field } from 'payload'
|
||||
|
||||
import { tabHasName } from 'payload/shared'
|
||||
|
||||
type Args = {
|
||||
doc: Record<string, unknown>
|
||||
fields: Field[]
|
||||
locale?: string
|
||||
path: string
|
||||
rows: Record<string, unknown>[]
|
||||
}
|
||||
|
||||
export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
||||
fields.forEach((field) => {
|
||||
switch (field.type) {
|
||||
case 'group': {
|
||||
const newPath = `${path ? `${path}.` : ''}${field.name}`
|
||||
const newDoc = doc?.[field.name]
|
||||
|
||||
if (typeof newDoc === 'object' && newDoc !== null) {
|
||||
if (field.localized) {
|
||||
Object.entries(newDoc).forEach(([locale, localeDoc]) => {
|
||||
return traverseFields({
|
||||
doc: localeDoc,
|
||||
fields: field.fields,
|
||||
locale,
|
||||
path: newPath,
|
||||
rows,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
return traverseFields({
|
||||
doc: newDoc as Record<string, unknown>,
|
||||
fields: field.fields,
|
||||
path: newPath,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'row':
|
||||
case 'collapsible': {
|
||||
return traverseFields({
|
||||
doc,
|
||||
fields: field.fields,
|
||||
path,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
|
||||
case 'array': {
|
||||
const rowData = doc?.[field.name]
|
||||
|
||||
if (field.localized && typeof rowData === 'object' && rowData !== null) {
|
||||
Object.entries(rowData).forEach(([locale, localeRows]) => {
|
||||
if (Array.isArray(localeRows)) {
|
||||
localeRows.forEach((row, i) => {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: field.fields,
|
||||
locale,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (Array.isArray(rowData)) {
|
||||
rowData.forEach((row, i) => {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: field.fields,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
const rowData = doc?.[field.name]
|
||||
|
||||
if (field.localized && typeof rowData === 'object' && rowData !== null) {
|
||||
Object.entries(rowData).forEach(([locale, localeRows]) => {
|
||||
if (Array.isArray(localeRows)) {
|
||||
localeRows.forEach((row, i) => {
|
||||
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: matchedBlock.fields,
|
||||
locale,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (Array.isArray(rowData)) {
|
||||
rowData.forEach((row, i) => {
|
||||
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: matchedBlock.fields,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'tabs': {
|
||||
return field.tabs.forEach((tab) => {
|
||||
if (tabHasName(tab)) {
|
||||
const newDoc = doc?.[tab.name]
|
||||
const newPath = `${path ? `${path}.` : ''}${tab.name}`
|
||||
|
||||
if (typeof newDoc === 'object' && newDoc !== null) {
|
||||
if (tab.localized) {
|
||||
Object.entries(newDoc).forEach(([locale, localeDoc]) => {
|
||||
return traverseFields({
|
||||
doc: localeDoc,
|
||||
fields: tab.fields,
|
||||
locale,
|
||||
path: newPath,
|
||||
rows,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
return traverseFields({
|
||||
doc: newDoc as Record<string, unknown>,
|
||||
fields: tab.fields,
|
||||
path: newPath,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
traverseFields({
|
||||
doc,
|
||||
fields: tab.fields,
|
||||
path,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
case 'relationship':
|
||||
case 'upload': {
|
||||
if (typeof field.relationTo === 'string') {
|
||||
if (field.type === 'upload' || !field.hasMany) {
|
||||
const relationshipPath = `${path ? `${path}.` : ''}${field.name}`
|
||||
|
||||
if (field.localized) {
|
||||
const matchedRelationshipsWithLocales = rows.filter(
|
||||
(row) => row.path === relationshipPath,
|
||||
)
|
||||
|
||||
if (matchedRelationshipsWithLocales.length && !doc[field.name]) {
|
||||
doc[field.name] = {}
|
||||
}
|
||||
|
||||
const newDoc = doc[field.name] as Record<string, unknown>
|
||||
|
||||
matchedRelationshipsWithLocales.forEach((localeRow) => {
|
||||
if (typeof localeRow.locale === 'string') {
|
||||
const [, id] = Object.entries(localeRow).find(
|
||||
([key, val]) =>
|
||||
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
|
||||
)
|
||||
|
||||
newDoc[localeRow.locale] = id
|
||||
}
|
||||
})
|
||||
} else {
|
||||
const matchedRelationship = rows.find((row) => {
|
||||
const matchesPath = row.path === relationshipPath
|
||||
|
||||
if (locale) return matchesPath && locale === row.locale
|
||||
|
||||
return row.path === relationshipPath
|
||||
})
|
||||
|
||||
if (matchedRelationship) {
|
||||
const [, id] = Object.entries(matchedRelationship).find(
|
||||
([key, val]) =>
|
||||
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
|
||||
)
|
||||
|
||||
doc[field.name] = id
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
export type Groups =
|
||||
| 'addColumn'
|
||||
| 'addConstraint'
|
||||
| 'dropColumn'
|
||||
| 'dropConstraint'
|
||||
| 'dropTable'
|
||||
| 'notNull'
|
||||
|
||||
/**
|
||||
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
|
||||
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
|
||||
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
* @param sql
|
||||
*/
|
||||
function convertAddColumnToAlterColumn(sql) {
|
||||
// Regular expression to match the ADD COLUMN statement with its constraints
|
||||
const regex = /ALTER TABLE ("[^"]+") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
|
||||
|
||||
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
|
||||
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
|
||||
}
|
||||
|
||||
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
|
||||
const groups = {
|
||||
addColumn: 'ADD COLUMN',
|
||||
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
||||
|
||||
addConstraint: 'ADD CONSTRAINT',
|
||||
//example:
|
||||
// DO $$ BEGIN
|
||||
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
// EXCEPTION
|
||||
// WHEN duplicate_object THEN null;
|
||||
// END $$;
|
||||
|
||||
dropColumn: 'DROP COLUMN',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
||||
|
||||
dropConstraint: 'DROP CONSTRAINT',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
||||
|
||||
dropTable: 'DROP TABLE',
|
||||
// example: DROP TABLE "pages_rels";
|
||||
|
||||
notNull: 'NOT NULL',
|
||||
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
}
|
||||
|
||||
const result = Object.keys(groups).reduce((result, group: Groups) => {
|
||||
result[group] = []
|
||||
return result
|
||||
}, {}) as Record<Groups, string[]>
|
||||
|
||||
for (const line of list) {
|
||||
Object.entries(groups).some(([key, value]) => {
|
||||
if (line.endsWith('NOT NULL;')) {
|
||||
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
|
||||
// example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
|
||||
// becomes two separate statements:
|
||||
// 1. ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer;
|
||||
// 2. ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
result.addColumn.push(line.replace(' NOT NULL;', ';'))
|
||||
result.notNull.push(convertAddColumnToAlterColumn(line))
|
||||
return true
|
||||
}
|
||||
if (line.includes(value)) {
|
||||
result[key].push(line)
|
||||
return true
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
@@ -0,0 +1,278 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
|
||||
import type { Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { sql } from 'drizzle-orm'
|
||||
import fs from 'fs'
|
||||
import { createRequire } from 'module'
|
||||
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { VercelPostgresAdapter } from '../../types.js'
|
||||
import type { PathsToQuery } from './types.js'
|
||||
|
||||
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
|
||||
import { migrateRelationships } from './migrateRelationships.js'
|
||||
import { traverseFields } from './traverseFields.js'
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
|
||||
type Args = {
|
||||
debug?: boolean
|
||||
payload: Payload
|
||||
req?: Partial<PayloadRequest>
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves upload and relationship columns from the join table and into the tables while moving data
|
||||
* This is done in the following order:
|
||||
* ADD COLUMNs
|
||||
* -- manipulate data to move relationships to new columns
|
||||
* ADD CONSTRAINTs
|
||||
* NOT NULLs
|
||||
* DROP TABLEs
|
||||
* DROP CONSTRAINTs
|
||||
* DROP COLUMNs
|
||||
* @param debug
|
||||
* @param payload
|
||||
* @param req
|
||||
*/
|
||||
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
const adapter = payload.db as unknown as VercelPostgresAdapter
|
||||
const db = adapter.sessions[await req.transactionID].db as TransactionPg
|
||||
const dir = payload.db.migrationDir
|
||||
|
||||
// get the drizzle migrateUpSQL from drizzle using the last schema
|
||||
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
|
||||
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
|
||||
|
||||
// Get the previous migration snapshot
|
||||
const previousSnapshot = fs
|
||||
.readdirSync(dir)
|
||||
.filter((file) => file.endsWith('.json') && !file.endsWith('relationships_v2_v3.json'))
|
||||
.sort()
|
||||
.reverse()?.[0]
|
||||
|
||||
if (!previousSnapshot) {
|
||||
throw new Error(
|
||||
`No previous migration schema file found! A prior migration from v2 is required to migrate to v3.`,
|
||||
)
|
||||
}
|
||||
|
||||
const drizzleJsonBefore = JSON.parse(
|
||||
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
|
||||
) as DrizzleSnapshotJSON
|
||||
|
||||
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
|
||||
if (!generatedSQL.length) {
|
||||
payload.logger.info(`No schema changes needed.`)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
|
||||
|
||||
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
|
||||
payload.logger.info(addColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addColumnsStatement))
|
||||
|
||||
for (const collection of payload.config.collections) {
|
||||
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
|
||||
const pathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: false,
|
||||
fields: collection.fields,
|
||||
isVersions: false,
|
||||
newTableName: tableName,
|
||||
parentTableName: tableName,
|
||||
path: '',
|
||||
pathsToQuery,
|
||||
payload,
|
||||
rootTableName: tableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
db,
|
||||
debug,
|
||||
fields: collection.fields,
|
||||
isVersions: false,
|
||||
pathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
|
||||
if (collection.versions) {
|
||||
const versionsTableName = adapter.tableNameMap.get(
|
||||
`_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,
|
||||
)
|
||||
const versionFields = buildVersionCollectionFields(collection)
|
||||
const versionPathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: true,
|
||||
fields: versionFields,
|
||||
isVersions: true,
|
||||
newTableName: versionsTableName,
|
||||
parentTableName: versionsTableName,
|
||||
path: '',
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
rootTableName: versionsTableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
db,
|
||||
debug,
|
||||
fields: versionFields,
|
||||
isVersions: true,
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName: versionsTableName,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for (const global of payload.config.globals) {
|
||||
const tableName = adapter.tableNameMap.get(toSnakeCase(global.slug))
|
||||
|
||||
const pathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: false,
|
||||
fields: global.fields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: false,
|
||||
newTableName: tableName,
|
||||
parentTableName: tableName,
|
||||
path: '',
|
||||
pathsToQuery,
|
||||
payload,
|
||||
rootTableName: tableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
db,
|
||||
debug,
|
||||
fields: global.fields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: false,
|
||||
pathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
|
||||
if (global.versions) {
|
||||
const versionsTableName = adapter.tableNameMap.get(
|
||||
`_${toSnakeCase(global.slug)}${adapter.versionsSuffix}`,
|
||||
)
|
||||
|
||||
const versionFields = buildVersionGlobalFields(global)
|
||||
|
||||
const versionPathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: true,
|
||||
fields: versionFields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: true,
|
||||
newTableName: versionsTableName,
|
||||
parentTableName: versionsTableName,
|
||||
path: '',
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
rootTableName: versionsTableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
db,
|
||||
debug,
|
||||
fields: versionFields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: true,
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName: versionsTableName,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ADD CONSTRAINT
|
||||
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('ADDING CONSTRAINTS')
|
||||
payload.logger.info(addConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addConstraintsStatement))
|
||||
|
||||
// NOT NULL
|
||||
const notNullStatements = sqlUpStatements.notNull.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('NOT NULL CONSTRAINTS')
|
||||
payload.logger.info(notNullStatements)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(notNullStatements))
|
||||
|
||||
// DROP TABLE
|
||||
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING TABLES')
|
||||
payload.logger.info(dropTablesStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropTablesStatement))
|
||||
|
||||
// DROP CONSTRAINT
|
||||
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING CONSTRAINTS')
|
||||
payload.logger.info(dropConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropConstraintsStatement))
|
||||
|
||||
// DROP COLUMN
|
||||
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING COLUMNS')
|
||||
payload.logger.info(dropColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropColumnsStatement))
|
||||
}
|
||||
@@ -0,0 +1,103 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { Field, Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { sql } from 'drizzle-orm'
|
||||
|
||||
import type { VercelPostgresAdapter } from '../../types.js'
|
||||
import type { DocsToResave, PathsToQuery } from './types.js'
|
||||
|
||||
import { fetchAndResave } from './fetchAndResave/index.js'
|
||||
|
||||
type Args = {
|
||||
adapter: VercelPostgresAdapter
|
||||
collectionSlug?: string
|
||||
db: TransactionPg
|
||||
debug: boolean
|
||||
fields: Field[]
|
||||
globalSlug?: string
|
||||
isVersions: boolean
|
||||
pathsToQuery: PathsToQuery
|
||||
payload: Payload
|
||||
req?: Partial<PayloadRequest>
|
||||
tableName: string
|
||||
}
|
||||
|
||||
export const migrateRelationships = async ({
|
||||
adapter,
|
||||
collectionSlug,
|
||||
db,
|
||||
debug,
|
||||
fields,
|
||||
globalSlug,
|
||||
isVersions,
|
||||
pathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
}: Args) => {
|
||||
if (pathsToQuery.size === 0) return
|
||||
|
||||
let offset = 0
|
||||
|
||||
let paginationResult
|
||||
|
||||
const where = Array.from(pathsToQuery).reduce((statement, path, i) => {
|
||||
return (statement += `
|
||||
"${tableName}${adapter.relationshipsSuffix}"."path" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}
|
||||
`)
|
||||
}, '')
|
||||
|
||||
while (typeof paginationResult === 'undefined' || paginationResult.rows.length > 0) {
|
||||
const paginationStatement = `SELECT DISTINCT parent_id FROM ${tableName}${adapter.relationshipsSuffix} WHERE
|
||||
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
|
||||
`
|
||||
|
||||
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))
|
||||
|
||||
if (paginationResult.rows.length === 0) return
|
||||
|
||||
offset += 1
|
||||
|
||||
const statement = `SELECT * FROM ${tableName}${adapter.relationshipsSuffix} WHERE
|
||||
(${where}) AND parent_id IN (${paginationResult.rows.map((row) => row.parent_id).join(', ')});
|
||||
`
|
||||
if (debug) {
|
||||
payload.logger.info('FINDING ROWS TO MIGRATE')
|
||||
payload.logger.info(statement)
|
||||
}
|
||||
|
||||
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))
|
||||
|
||||
const docsToResave: DocsToResave = {}
|
||||
|
||||
result.rows.forEach((row) => {
|
||||
const parentID = row.parent_id
|
||||
|
||||
if (typeof parentID === 'string' || typeof parentID === 'number') {
|
||||
if (!docsToResave[parentID]) docsToResave[parentID] = []
|
||||
docsToResave[parentID].push(row)
|
||||
}
|
||||
})
|
||||
|
||||
await fetchAndResave({
|
||||
adapter,
|
||||
collectionSlug,
|
||||
db,
|
||||
debug,
|
||||
docsToResave,
|
||||
fields,
|
||||
globalSlug,
|
||||
isVersions,
|
||||
payload,
|
||||
req: req as unknown as PayloadRequest,
|
||||
tableName,
|
||||
})
|
||||
}
|
||||
|
||||
const deleteStatement = `DELETE FROM ${tableName}${adapter.relationshipsSuffix} WHERE ${where}`
|
||||
if (debug) {
|
||||
payload.logger.info('DELETING ROWS')
|
||||
payload.logger.info(deleteStatement)
|
||||
}
|
||||
await db.execute(sql.raw(`${deleteStatement}`))
|
||||
}
|
||||
@@ -0,0 +1,117 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { Field, Payload } from 'payload'

import { tabHasName } from 'payload/shared'
import toSnakeCase from 'to-snake-case'

import type { VercelPostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'

type Args = {
  adapter: VercelPostgresAdapter
  collectionSlug?: string
  columnPrefix: string
  db: TransactionPg
  disableNotNull: boolean
  fields: Field[]
  globalSlug?: string
  isVersions: boolean
  newTableName: string
  parentTableName: string
  path: string
  pathsToQuery: PathsToQuery
  payload: Payload
  rootTableName: string
}

export const traverseFields = (args: Args) => {
  args.fields.forEach((field) => {
    switch (field.type) {
      case 'group': {
        let newTableName = `${args.newTableName}_${toSnakeCase(field.name)}`

        if (field.localized && args.payload.config.localization) {
          newTableName += args.adapter.localesSuffix
        }

        return traverseFields({
          ...args,
          columnPrefix: `${args.columnPrefix}${toSnakeCase(field.name)}_`,
          fields: field.fields,
          newTableName,
          path: `${args.path ? `${args.path}.` : ''}${field.name}`,
        })
      }

      case 'row':
      case 'collapsible': {
        return traverseFields({
          ...args,
          fields: field.fields,
        })
      }

      case 'array': {
        const newTableName = args.adapter.tableNameMap.get(
          `${args.newTableName}_${toSnakeCase(field.name)}`,
        )

        return traverseFields({
          ...args,
          columnPrefix: '',
          fields: field.fields,
          newTableName,
          parentTableName: newTableName,
          path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
        })
      }

      case 'blocks': {
        return field.blocks.forEach((block) => {
          const newTableName = args.adapter.tableNameMap.get(
            `${args.rootTableName}_blocks_${toSnakeCase(block.slug)}`,
          )

          traverseFields({
            ...args,
            columnPrefix: '',
            fields: block.fields,
            newTableName,
            parentTableName: newTableName,
            path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
          })
        })
      }

      case 'tabs': {
        return field.tabs.forEach((tab) => {
          if (tabHasName(tab)) {
            args.columnPrefix = `${args.columnPrefix}_${toSnakeCase(tab.name)}_`
            args.path = `${args.path ? `${args.path}.` : ''}${tab.name}`
            args.newTableName = `${args.newTableName}_${toSnakeCase(tab.name)}`

            if (tab.localized && args.payload.config.localization) {
              args.newTableName += args.adapter.localesSuffix
            }
          }

          traverseFields({
            ...args,
            fields: tab.fields,
          })
        })
      }

      case 'relationship':
      case 'upload': {
        if (typeof field.relationTo === 'string') {
          if (field.type === 'upload' || !field.hasMany) {
            args.pathsToQuery.add(`${args.path ? `${args.path}.` : ''}${field.name}`)
          }
        }

        return null
      }
    }
  })
}
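This traversal collects, into `pathsToQuery`, the dot-notation locations of single relationship and upload fields, descending through groups, rows, collapsibles, tabs, arrays, and blocks, and substituting a `%` wildcard for array and block row indexes. The values it produces look roughly like the following; the field names are hypothetical, only the path shapes follow from the code above.

```ts
// Illustrative output of the traversal above; the field names are made up.
// Given a top-level `author` relationship, a `slides` array containing an
// `image` upload, and a blocks field `content` whose block has a `background`
// upload, pathsToQuery would end up holding:
const pathsToQuery = new Set<string>([
  'author', // simple top-level relationship
  'slides.%.image', // `%` replaces the array row index
  'content.%.background', // `%` replaces the block row index
])
```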
@@ -0,0 +1,9 @@
/**
 * Set of all paths which should be moved
 * This will be built up into one WHERE query
 */
export type PathsToQuery = Set<string>

export type DocsToResave = {
  [id: number | string]: Record<string, unknown>[]
}
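The comment on `PathsToQuery` says the set is folded into a single WHERE clause. That composition step is not shown in this excerpt, so the sketch below is an assumption about how it could look, matching wildcard paths with `LIKE` and exact paths with `=` against a hypothetical `path` column.

```ts
// Assumed sketch only: the diff shows the resulting `where` string being used,
// not how it is assembled. `path` is a hypothetical column holding the
// dot-notation location of each relationship row.
export const buildWhereFromPaths = (paths: Set<string>): string =>
  Array.from(paths)
    .map((p) => (p.includes('%') ? `path LIKE '${p}'` : `path = '${p}'`))
    .join(' OR ')

// buildWhereFromPaths(new Set(['author', 'slides.%.image']))
// => "path = 'author' OR path LIKE 'slides.%.image'"
```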
78
packages/db-vercel-postgres/src/types.ts
Normal file
@@ -0,0 +1,78 @@
import type {
  BasePostgresAdapter,
  GenericEnum,
  MigrateDownArgs,
  MigrateUpArgs,
  PostgresDB,
} from '@payloadcms/drizzle/postgres'
import type { DrizzleAdapter } from '@payloadcms/drizzle/types'
import type { VercelPool, VercelPostgresPoolConfig } from '@vercel/postgres'
import type { DrizzleConfig } from 'drizzle-orm'
import type { PgSchema, PgTableFn, PgTransactionConfig } from 'drizzle-orm/pg-core'

export type Args = {
  connectionString?: string
  idType?: 'serial' | 'uuid'
  localesSuffix?: string
  logger?: DrizzleConfig['logger']
  migrationDir?: string
  /**
   * Optional pool configuration for Vercel Postgres.
   * If not provided, @vercel/postgres will attempt to use the Vercel environment variables.
   */
  pool?: VercelPostgresPoolConfig
  prodMigrations?: {
    down: (args: MigrateDownArgs) => Promise<void>
    name: string
    up: (args: MigrateUpArgs) => Promise<void>
  }[]
  push?: boolean
  relationshipsSuffix?: string
  /**
   * The schema name to use for the database
   * @experimental This only works when there are no other tables or enums of the same name in the database under a different schema. Awaiting a fix from Drizzle.
   */
  schemaName?: string
  transactionOptions?: PgTransactionConfig | false
  versionsSuffix?: string
}

export type VercelPostgresAdapter = {
  pool?: VercelPool
  poolOptions?: Args['pool']
} & BasePostgresAdapter

declare module 'payload' {
  export interface DatabaseAdapter
    extends Omit<Args, 'idType' | 'logger' | 'migrationDir' | 'pool'>,
      DrizzleAdapter {
    beginTransaction: (options?: PgTransactionConfig) => Promise<null | number | string>
    drizzle: PostgresDB
    enums: Record<string, GenericEnum>
    /**
     * An object keyed by table name, mapping each unique constraint name to its dot-notation field name.
     * Used for returning properly formed errors from unique fields.
     */
    fieldConstraints: Record<string, Record<string, string>>
    idType: Args['idType']
    initializing: Promise<void>
    localesSuffix?: string
    logger: DrizzleConfig['logger']
    pgSchema?: { table: PgTableFn } | PgSchema
    pool: VercelPool
    poolOptions: Args['pool']
    prodMigrations?: {
      down: (args: MigrateDownArgs) => Promise<void>
      name: string
      up: (args: MigrateUpArgs) => Promise<void>
    }[]
    push: boolean
    rejectInitializing: () => void
    relationshipsSuffix?: string
    resolveInitializing: () => void
    schema: Record<string, unknown>
    schemaName?: Args['schemaName']
    tableNameMap: Map<string, string>
    versionsSuffix?: string
  }
}
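Given the `Args` type above, wiring the adapter into a Payload project might look like the sketch below. The `vercelPostgresAdapter` factory name and import are inferred from the package name rather than shown in this excerpt, so treat them as assumptions; only the option names come from the type.

```ts
// Hypothetical usage sketch based on the Args type above. The factory name is
// an assumption; the option names (pool, idType, migrationDir, push) are not.
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export const db = vercelPostgresAdapter({
  // Omit `pool` entirely to let @vercel/postgres read the Vercel env vars.
  pool: { connectionString: process.env.POSTGRES_URL },
  idType: 'uuid',
  migrationDir: './src/migrations',
  push: process.env.NODE_ENV !== 'production',
})
```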
38
packages/db-vercel-postgres/tsconfig.json
Normal file
@@ -0,0 +1,38 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "composite": true,
    "noEmit": false,
    "emitDeclarationOnly": true,
    "outDir": "./dist",
    "rootDir": "./src",
    "skipLibCheck": true,
  },
  "exclude": [
    "dist",
    "build",
    "tests",
    "test",
    "node_modules",
    "eslint.config.js",
    "src/**/*.spec.js",
    "src/**/*.spec.jsx",
    "src/**/*.spec.ts",
    "src/**/*.spec.tsx"
  ],
  "include": [
    "src",
    "src/**/*.ts",
  ],
  "references": [
    {
      "path": "../payload"
    },
    {
      "path": "../translations"
    },
    {
      "path": "../drizzle"
    }
  ]
}
@@ -1,10 +0,0 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/drizzle",
  "version": "3.0.0-beta.88",
  "version": "3.0.0-beta.96",
  "description": "A library of shared functions used by different payload database adapters",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -39,6 +39,8 @@
    "build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
    "build:types": "tsc --emitDeclarationOnly --outDir dist",
    "clean": "rimraf {dist,*.tsbuildinfo}",
    "lint": "eslint .",
    "lint:fix": "eslint . --fix",
    "prepack": "pnpm clean && pnpm turbo build",
    "prepublishOnly": "pnpm clean && pnpm turbo build"
  },
@@ -12,11 +12,11 @@ type BuildFindQueryArgs = {
  tableName: string
}

export type Result = DBQueryConfig<'many', true, any, any> & {
  with?: DBQueryConfig<'many', true, any, any> & {
export type Result = {
  with?: {
    _locales?: DBQueryConfig<'many', true, any, any>
  }
}
  } & DBQueryConfig<'many', true, any, any>
} & DBQueryConfig<'many', true, any, any>

// Generate the Drizzle query for findMany based on
// a collection field structure
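After this change `Result` is still a Drizzle `findMany` config, but the optional `_locales` join is typed explicitly instead of being folded into a nested intersection. A value of the new type looks roughly like this; the column names are hypothetical.

```ts
// Sample shape described by the updated Result type: a DBQueryConfig whose
// optional `with._locales` entry is itself a DBQueryConfig. Column names here
// are hypothetical.
const findManyArgs = {
  columns: { id: true, title: true },
  with: {
    _locales: {
      columns: { _parentID: false },
      limit: 1,
    },
  },
}
```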
@@ -1,4 +1,3 @@
/* eslint-disable no-param-reassign */
import type { Field } from 'payload'

import { fieldAffectsData, tabHasName } from 'payload/shared'
@@ -34,8 +33,9 @@ export const traverseFields = ({
    // handle simple relationship
    if (
      depth > 0 &&
      (field.type === 'upload' ||
        (field.type === 'relationship' && !field.hasMany && typeof field.relationTo === 'string'))
      (field.type === 'upload' || field.type === 'relationship') &&
      !field.hasMany &&
      typeof field.relationTo === 'string'
    ) {
      if (field.localized) {
        _locales.with[`${path}${field.name}`] = true
@@ -1,4 +1,3 @@
/* eslint-disable no-restricted-syntax, no-await-in-loop */
import type { PayloadRequest } from 'payload'

import {
@@ -1,4 +1,3 @@
/* eslint-disable no-restricted-syntax, no-await-in-loop */
import type { PayloadRequest } from 'payload'

import {
@@ -1,4 +1,3 @@
/* eslint-disable no-restricted-syntax, no-await-in-loop */
import type { PayloadRequest } from 'payload'

import {
@@ -30,7 +30,7 @@ export async function migrateStatus(this: DrizzleAdapter): Promise<void> {
    const existingMigration = existingMigrations.find((m) => m.name === migration.name)
    return {
      Name: migration.name,
      // eslint-disable-next-line perfectionist/sort-objects

      Batch: existingMigration?.batch,
      Ran: existingMigration ? 'Yes' : 'No',
    }
@@ -1,4 +1,3 @@
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { CreateMigration } from 'payload'

import fs from 'fs'
@@ -112,6 +111,7 @@ export const createMigration: CreateMigration = async function createMigration(
    getMigrationTemplate({
      downSQL: downSQL || ` // Migration code`,
      imports,
      packageName: payload.db.packageName,
      upSQL: upSQL || ` // Migration code`,
    }),
  )
@@ -9,8 +9,9 @@ export const indent = (text: string) =>
export const getMigrationTemplate = ({
  downSQL,
  imports,
  packageName,
  upSQL,
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '${packageName}'
${imports ? `${imports}\n` : ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
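With `packageName` threaded through, generated migrations now import from whichever adapter produced them instead of a hard-coded `@payloadcms/db-postgres`. For the Vercel adapter, an emitted file would start roughly like this; the empty bodies are the template's placeholders, and the symmetric `down` function is assumed since only `up` appears in the excerpt.

```ts
// Sketch of a freshly generated migration when payload.db.packageName is
// '@payloadcms/db-vercel-postgres'. Bodies are the template placeholders.
import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-vercel-postgres'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  // Migration code
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
  // Migration code
}
```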
@@ -54,9 +54,17 @@ type Args = {
  tableName: string
  timestamps?: boolean
  versions: boolean
  /**
   * Tracks whether or not this table is built
   * from the result of a localized array or block field at some point
   */
  withinLocalizedArrayOrBlock?: boolean
}

type Result = {
  hasLocalizedManyNumberField: boolean
  hasLocalizedManyTextField: boolean
  hasLocalizedRelationshipField: boolean
  hasManyNumberField: 'index' | boolean
  hasManyTextField: 'index' | boolean
  relationsToBuild: RelationMap
@@ -76,6 +84,7 @@ export const buildTable = ({
  tableName,
  timestamps,
  versions,
  withinLocalizedArrayOrBlock,
}: Args): Result => {
  const isRoot = !incomingRootTableName
  const rootTableName = incomingRootTableName || tableName
@@ -122,6 +131,7 @@ export const buildTable = ({
    rootTableIDColType: rootTableIDColType || idColType,
    rootTableName,
    versions,
    withinLocalizedArrayOrBlock,
  })

  // split the relationsToBuild by localized and non-localized
@@ -464,5 +474,12 @@ export const buildTable = ({
    return result
  })

  return { hasManyNumberField, hasManyTextField, relationsToBuild }
  return {
    hasLocalizedManyNumberField,
    hasLocalizedManyTextField,
    hasLocalizedRelationshipField,
    hasManyNumberField,
    hasManyTextField,
    relationsToBuild,
  }
}
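The hunks above and below repeat one pattern: a column or flag now counts as localized either because the field itself is localized (and localization is enabled in the config) or because the table descends from a localized array or block, which is what the new `withinLocalizedArrayOrBlock` flag propagates into recursive calls. Written out as a standalone helper purely for illustration:

```ts
// The localization check repeated throughout these hunks, extracted as a
// helper for clarity. The helper itself is illustrative; the inputs mirror
// field.localized, payload.config.localization, and the inherited flag.
const isFieldLocalized = (
  field: { localized?: boolean },
  localizationEnabled: boolean,
  withinLocalizedArrayOrBlock?: boolean,
): boolean =>
  Boolean(field.localized && localizationEnabled) || Boolean(withinLocalizedArrayOrBlock)
```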
@@ -58,6 +58,11 @@ type Args = {
  rootTableIDColType: string
  rootTableName: string
  versions: boolean
  /**
   * Tracks whether or not this table is built
   * from the result of a localized array or block field at some point
   */
  withinLocalizedArrayOrBlock?: boolean
}

type Result = {
@@ -89,6 +94,7 @@ export const traverseFields = ({
  rootTableIDColType,
  rootTableName,
  versions,
  withinLocalizedArrayOrBlock,
}: Args): Result => {
  const throwValidationError = true
  let hasLocalizedField = false
@@ -156,7 +162,11 @@ export const traverseFields = ({
    switch (field.type) {
      case 'text': {
        if (field.hasMany) {
          if (field.localized) {
          const isLocalized =
            Boolean(field.localized && adapter.payload.config.localization) ||
            withinLocalizedArrayOrBlock

          if (isLocalized) {
            hasLocalizedManyTextField = true
          }

@@ -185,7 +195,11 @@ export const traverseFields = ({

      case 'number': {
        if (field.hasMany) {
          if (field.localized) {
          const isLocalized =
            Boolean(field.localized && adapter.payload.config.localization) ||
            withinLocalizedArrayOrBlock

          if (isLocalized) {
            hasLocalizedManyNumberField = true
          }

@@ -276,7 +290,11 @@ export const traverseFields = ({
          parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent),
        }

        if (field.localized) {
        const isLocalized =
          Boolean(field.localized && adapter.payload.config.localization) ||
          withinLocalizedArrayOrBlock

        if (isLocalized) {
          baseColumns.locale = adapter.enums.enum__locales('locale').notNull()
          baseExtraConfig.localeIdx = (cols) =>
            index(`${selectTableName}_locale_idx`).on(cols.locale)
@@ -354,13 +372,20 @@ export const traverseFields = ({
          _parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID),
        }

        if (field.localized && adapter.payload.config.localization) {
        const isLocalized =
          Boolean(field.localized && adapter.payload.config.localization) ||
          withinLocalizedArrayOrBlock

        if (isLocalized) {
          baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
          baseExtraConfig._localeIdx = (cols) =>
            index(`${arrayTableName}_locale_idx`).on(cols._locale)
        }

        const {
          hasLocalizedManyNumberField: subHasLocalizedManyNumberField,
          hasLocalizedManyTextField: subHasLocalizedManyTextField,
          hasLocalizedRelationshipField: subHasLocalizedRelationshipField,
          hasManyNumberField: subHasManyNumberField,
          hasManyTextField: subHasManyTextField,
          relationsToBuild: subRelationsToBuild,
@@ -377,8 +402,21 @@ export const traverseFields = ({
          rootTableName,
          tableName: arrayTableName,
          versions,
          withinLocalizedArrayOrBlock: isLocalized,
        })

        if (subHasLocalizedManyNumberField) {
          hasLocalizedManyNumberField = subHasLocalizedManyNumberField
        }

        if (subHasLocalizedRelationshipField) {
          hasLocalizedRelationshipField = subHasLocalizedRelationshipField
        }

        if (subHasLocalizedManyTextField) {
          hasLocalizedManyTextField = subHasLocalizedManyTextField
        }

        if (subHasManyTextField) {
          if (!hasManyTextField || subHasManyTextField === 'index')
            hasManyTextField = subHasManyTextField
@@ -466,13 +504,20 @@ export const traverseFields = ({
          _pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path),
        }

        if (field.localized && adapter.payload.config.localization) {
        const isLocalized =
          Boolean(field.localized && adapter.payload.config.localization) ||
          withinLocalizedArrayOrBlock

        if (isLocalized) {
          baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
          baseExtraConfig._localeIdx = (cols) =>
            index(`${blockTableName}_locale_idx`).on(cols._locale)
        }

        const {
          hasLocalizedManyNumberField: subHasLocalizedManyNumberField,
          hasLocalizedManyTextField: subHasLocalizedManyTextField,
          hasLocalizedRelationshipField: subHasLocalizedRelationshipField,
          hasManyNumberField: subHasManyNumberField,
          hasManyTextField: subHasManyTextField,
          relationsToBuild: subRelationsToBuild,
@@ -489,8 +534,21 @@ export const traverseFields = ({
          rootTableName,
          tableName: blockTableName,
          versions,
          withinLocalizedArrayOrBlock: isLocalized,
        })

        if (subHasLocalizedManyNumberField) {
          hasLocalizedManyNumberField = subHasLocalizedManyNumberField
        }

        if (subHasLocalizedRelationshipField) {
          hasLocalizedRelationshipField = subHasLocalizedRelationshipField
        }

        if (subHasLocalizedManyTextField) {
          hasLocalizedManyTextField = subHasLocalizedManyTextField
        }

        if (subHasManyTextField) {
          if (!hasManyTextField || subHasManyTextField === 'index')
            hasManyTextField = subHasManyTextField
@@ -589,6 +647,7 @@
        rootTableIDColType,
        rootTableName,
        versions,
        withinLocalizedArrayOrBlock,
      })

      if (groupHasLocalizedField) hasLocalizedField = true
@@ -629,6 +688,7 @@
        rootTableIDColType,
        rootTableName,
        versions,
        withinLocalizedArrayOrBlock,
      })

      if (groupHasLocalizedField) hasLocalizedField = true
@@ -670,6 +730,7 @@
        rootTableIDColType,
        rootTableName,
        versions,
        withinLocalizedArrayOrBlock,
      })

      if (tabHasLocalizedField) hasLocalizedField = true
@@ -711,6 +772,7 @@
        rootTableIDColType,
        rootTableName,
        versions,
        withinLocalizedArrayOrBlock,
      })

      if (rowHasLocalizedField) hasLocalizedField = true
@@ -726,7 +788,7 @@
      case 'upload':
        if (Array.isArray(field.relationTo)) {
          field.relationTo.forEach((relation) => relationships.add(relation))
        } else if (field.type === 'relationship' && field.hasMany) {
        } else if (field.hasMany) {
          relationships.add(field.relationTo)
        } else {
          // simple relationships get a column on the targetTable with a foreign key to the relationTo table
@@ -761,7 +823,11 @@
          }
          break
        }
        if (adapter.payload.config.localization && field.localized) {

        if (
          Boolean(field.localized && adapter.payload.config.localization) ||
          withinLocalizedArrayOrBlock
        ) {
          hasLocalizedRelationshipField = true
        }
@@ -111,8 +111,6 @@ export type BasePostgresAdapter = {
  logger: DrizzleConfig['logger']
  operators: Operators
  pgSchema?: Schema
  // pool: Pool
  // poolOptions: Args['pool']
  prodMigrations?: {
    down: (args: MigrateDownArgs) => Promise<void>
    name: string
@@ -28,11 +28,10 @@ export async function buildAndOrConditions({
  const completedConditions = []
  // Loop over all AND / OR operations and add them to the AND / OR query param
  // Operations should come through as an array
  // eslint-disable-next-line no-restricted-syntax

  for (const condition of where) {
    // If the operation is properly formatted as an object
    if (typeof condition === 'object') {
      // eslint-disable-next-line no-await-in-loop
      const result = await parseParams({
        adapter,
        fields,
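`buildAndOrConditions` walks each entry of an `and`/`or` array and feeds well-formed objects back through `parseParams`, so arbitrarily nested boolean queries reduce to one combined condition. The sketch below shows only that recursion shape; the simplified `Where` type and the `parseLeaf` callback are stand-ins for Payload's real types and `parseParams`.

```ts
// Shape-only sketch of the and/or recursion; `parseLeaf` stands in for
// parseParams and the Where type is deliberately simplified.
type Where = { and?: Where[]; or?: Where[] } & Record<string, unknown>

function buildCondition(where: Where, parseLeaf: (w: Where) => string): string {
  if (Array.isArray(where.and)) {
    return `(${where.and.map((w) => buildCondition(w, parseLeaf)).join(' AND ')})`
  }
  if (Array.isArray(where.or)) {
    return `(${where.or.map((w) => buildCondition(w, parseLeaf)).join(' OR ')})`
  }
  return parseLeaf(where)
}
```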
@@ -445,7 +445,7 @@ export const getTableColumnFromPath = ({
    case 'relationship':
    case 'upload': {
      const newCollectionPath = pathSegments.slice(1).join('.')
      if (Array.isArray(field.relationTo) || (field.type === 'relationship' && field.hasMany)) {
      if (Array.isArray(field.relationTo) || field.hasMany) {
        let relationshipFields
        const relationTableName = `${rootTableName}${adapter.relationshipsSuffix}`
        const {
Some files were not shown because too many files have changed in this diff.