Compare commits

..

2 Commits

Author SHA1 Message Date
German Jablonski
0333e2cd1c tests: spin postgres docker automatically in jest (pnpm test:int:postgres) 2025-07-15 19:55:12 +01:00
German Jablonski
bbf0c2474d tests: spin postgres docker automatically in pnpm dev 2025-07-15 19:41:10 +01:00
222 changed files with 1590 additions and 4827 deletions

View File

@@ -153,7 +153,6 @@ jobs:
matrix:
database:
- mongodb
- firestore
- postgres
- postgres-custom-schema
- postgres-uuid
@@ -284,7 +283,6 @@ jobs:
- fields__collections__Text
- fields__collections__UI
- fields__collections__Upload
- folders
- hooks
- lexical__collections__Lexical__e2e__main
- lexical__collections__Lexical__e2e__blocks
@@ -419,7 +417,6 @@ jobs:
- fields__collections__Text
- fields__collections__UI
- fields__collections__Upload
- folders
- hooks
- lexical__collections__Lexical__e2e__main
- lexical__collections__Lexical__e2e__blocks
@@ -721,8 +718,6 @@ jobs:
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
- name: Analyze esbuild bundle size
# Temporarily disable this for community PRs until this can be implemented in a separate workflow
if: github.event.pull_request.head.repo.fork == false
uses: exoego/esbuild-bundle-analyzer@v1
with:
metafiles: 'packages/payload/meta_index.json,packages/payload/meta_shared.json,packages/ui/meta_client.json,packages/ui/meta_shared.json,packages/next/meta_index.json,packages/richtext-lexical/meta_client.json'

View File

@@ -77,9 +77,13 @@ If you wish to use your own MongoDB database for the `test` directory instead of
### Using Postgres
If you have postgres installed on your system, you can also run the test suites using postgres. By default, mongodb is used.
Our test suite supports automatic PostgreSQL + PostGIS setup using Docker. No local PostgreSQL installation required. By default, MongoDB is used.
To do that, simply set the `PAYLOAD_DATABASE` environment variable to `postgres`.
To use postgres, simply set the `PAYLOAD_DATABASE` environment variable to `postgres`.
```bash
PAYLOAD_DATABASE=postgres pnpm dev {suite}
```
### Running the e2e and int tests

View File

@@ -30,22 +30,18 @@ export default buildConfig({
## Options
| Option | Description |
| ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. |
| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. |
| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. |
| `disableIndexHints` | Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination, as it increases the speed of the count function used in that query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false |
| `migrationDir` | Customize the directory that migrations are stored. |
| `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/) or `false` which will disable the use of transactions. |
| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). |
| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include to the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields, this can affect performance in some cases but it ensures a consistent order of results. |
| `useAlternativeDropDatabase` | Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. Payload only uses `dropDatabase` for testing purposes. Defaults to `false`. |
| `useBigIntForNumberIDs` | Set to `true` to use `BigInt` for custom ID fields of type `'number'`. Useful for databases that don't support `double` or `int32` IDs. Defaults to `false`. |
| `useJoinAggregations` | Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. Defaults to `true`. |
| `usePipelineInSortLookup` | Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. Defaults to `true`. |
| Option | Description |
| -------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. |
| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. |
| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. |
| `disableIndexHints` | Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination, as it increases the speed of the count function used in that query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false |
| `migrationDir` | Customize the directory that migrations are stored. |
| `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/) or `false` which will disable the use of transactions. |
| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). |
| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include to the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields, this can affect performance in some cases but it ensures a consistent order of results. |
## Access to Mongoose models
@@ -60,21 +56,9 @@ You can access Mongoose models as follows:
## Using other MongoDB implementations
You can import the `compatabilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by Payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated):
Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db):
```ts
import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb'
export default buildConfig({
db: mongooseAdapter({
url: process.env.DATABASE_URI,
// For example, if you're using firestore:
...compatabilityOptions.firestore,
}),
})
```
We export compatibility options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations:
- Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
- Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
- For Azure Cosmos DB you must pass `transactionOptions: false` to the adapter options. Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, which are limited there. This can be changed in the future.
- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as index which can cause problems with DocumentDB.

View File

@@ -51,7 +51,7 @@ export default buildConfig({
// add as many cron jobs as you want
],
shouldAutoRun: async (payload) => {
// Tell Payload if it should run jobs or not. This function is optional and will return true by default.
// Tell Payload if it should run jobs or not.
// This function will be invoked each time Payload goes to pick up and run jobs.
// If this function ever returns false, the cron schedule will be stopped.
return true

View File

@@ -194,27 +194,6 @@ const result = await payload.count({
})
```
### FindDistinct#collection-find-distinct
```js
// Result will be an object with:
// {
// values: ['value-1', 'value-2'], // array of distinct values,
// field: 'title', // the field
// totalDocs: 10, // count of the distinct values that satisfy the query,
// perPage: 10, // count of distinct values per page (based on provided limit)
// }
const result = await payload.findDistinct({
collection: 'posts', // required
locale: 'en',
where: {}, // pass a `where` query here
user: dummyUser,
overrideAccess: false,
field: 'title',
sort: 'title',
})
```
### Update by ID#collection-update-by-id
```js

View File

@@ -58,7 +58,7 @@ To learn more, see the [Custom Components Performance](../admin/custom-component
### Block references
Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can significantly reduce the amount of data sent from the server to the client in the Admin Panel.
Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can significantly reduce the amount of data sent from the server to the client in the Admin Panel.
For example, if you have a block that is used in multiple fields, you can define it once and reference it in each field.

View File

@@ -6,8 +6,6 @@ import { anyone } from './access/anyone'
import { checkRole } from './access/checkRole'
import { loginAfterCreate } from './hooks/loginAfterCreate'
import { protectRoles } from './hooks/protectRoles'
import { access } from 'fs'
import { create } from 'domain'
export const Users: CollectionConfig = {
slug: 'users',
@@ -34,34 +32,6 @@ export const Users: CollectionConfig = {
afterChange: [loginAfterCreate],
},
fields: [
{
name: 'email',
type: 'email',
required: true,
unique: true,
access: {
read: adminsAndUser,
update: adminsAndUser,
},
},
{
name: 'password',
type: 'password',
required: true,
admin: {
description: 'Leave blank to keep the current password.',
},
},
{
name: 'resetPasswordToken',
type: 'text',
hidden: true,
},
{
name: 'resetPasswordExpiration',
type: 'date',
hidden: true,
},
{
name: 'firstName',
type: 'text',
@@ -75,11 +45,6 @@ export const Users: CollectionConfig = {
type: 'select',
hasMany: true,
saveToJWT: true,
access: {
read: admins,
update: admins,
create: admins,
},
hooks: {
beforeChange: [protectRoles],
},

View File

@@ -1,6 +1,6 @@
{
"name": "payload-monorepo",
"version": "3.48.0",
"version": "3.47.0",
"private": true,
"type": "module",
"workspaces": [
@@ -76,6 +76,8 @@
"dev:prod:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod --start-memory-db",
"dev:vercel-postgres": "cross-env PAYLOAD_DATABASE=vercel-postgres pnpm runts ./test/dev.ts",
"devsafe": "node ./scripts/delete-recursively.js '**/.next' && pnpm dev",
"docker:postgres": "docker compose -f test/docker-compose.yml up -d postgres",
"docker:postgres:stop": "docker compose -f test/docker-compose.yml down postgres",
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
"docker:start": "docker compose -f test/docker-compose.yml up -d",
"docker:stop": "docker compose -f test/docker-compose.yml down",
@@ -112,7 +114,6 @@
"test:e2e:prod:ci": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod",
"test:e2e:prod:ci:noturbo": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod --no-turbo",
"test:int": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:firestore": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=firestore DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:types": "tstyche",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/admin-bar",
"version": "3.48.0",
"version": "3.47.0",
"description": "An admin bar for React apps using Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "create-payload-app",
"version": "3.48.0",
"version": "3.47.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "3.48.0",
"version": "3.47.0",
"description": "The officially supported MongoDB database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -36,25 +36,6 @@ export const connect: Connect = async function connect(
try {
this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection
if (this.useAlternativeDropDatabase) {
if (this.connection.db) {
// Firestore doesn't support dropDatabase, so we monkey patch
// dropDatabase to delete all documents from all collections instead
this.connection.db.dropDatabase = async function (): Promise<boolean> {
const existingCollections = await this.listCollections().toArray()
await Promise.all(
existingCollections.map(async (collectionInfo) => {
const collection = this.collection(collectionInfo.name)
await collection.deleteMany({})
}),
)
return true
}
this.connection.dropDatabase = async function () {
await this.db?.dropDatabase()
}
}
}
// If we are running a replica set with MongoDB Memory Server,
// wait until the replica set elects a primary before proceeding

View File

@@ -12,7 +12,6 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'
export const find: Find = async function find(
@@ -156,16 +155,6 @@ export const find: Find = async function find(
result = await Model.paginate(query, paginationOptions)
}
if (!this.useJoinAggregations) {
await resolveJoins({
adapter: this,
collectionSlug,
docs: result.docs as Record<string, unknown>[],
joins,
locale,
})
}
transform({
adapter: this,
data: result.docs,

View File

@@ -1,141 +0,0 @@
import type { PipelineStage } from 'mongoose'
import { type FindDistinct, getFieldByPath } from 'payload'
import type { MongooseAdapter } from './index.js'
import { buildQuery } from './queries/buildQuery.js'
import { buildSortParam } from './queries/buildSortParam.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
/**
 * Returns the distinct values of a single field across all documents in a
 * collection that match the provided `where` query, with optional
 * limit/page-based pagination.
 *
 * Implemented as a MongoDB aggregation pipeline:
 * $match -> (optional sort-related stages) -> $group (dedupe on the field)
 * -> $sort, plus $skip/$limit when `args.limit` is set.
 */
export const findDistinct: FindDistinct = async function (this: MongooseAdapter, args) {
  const { collectionConfig, Model } = getCollection({
    adapter: this,
    collectionSlug: args.collection,
  })
  const session = await getSession(this, args.req)
  const { where = {} } = args
  // Extra pipeline stages that buildSortParam may append when the sort path
  // needs them (e.g. sorting through a relationship) — NOTE(review): assumed
  // from the `sortAggregation` out-parameter contract; confirm in buildSortParam.
  const sortAggregation: PipelineStage[] = []
  // Default to sorting by the distinct field itself when no explicit sort is given.
  const sort = buildSortParam({
    adapter: this,
    config: this.payload.config,
    fields: collectionConfig.flattenedFields,
    locale: args.locale,
    sort: args.sort ?? args.field,
    sortAggregation,
    timestamps: true,
  })
  const query = await buildQuery({
    adapter: this,
    collectionSlug: args.collection,
    fields: collectionConfig.flattenedFields,
    locale: args.locale,
    where,
  })
  const fieldPathResult = getFieldByPath({
    fields: collectionConfig.flattenedFields,
    path: args.field,
  })
  let fieldPath = args.field
  // For localized fields, substitute the concrete locale into the stored path
  // (the '<locale>' placeholder, e.g. `title.<locale>` -> `title.en`).
  if (fieldPathResult?.pathHasLocalized && args.locale) {
    fieldPath = fieldPathResult.localizedPath.replace('<locale>', args.locale)
  }
  const page = args.page || 1
  const sortProperty = Object.keys(sort)[0]! // assert because buildSortParam always returns at least 1 key.
  const sortDirection = sort[sortProperty] === 'asc' ? 1 : -1
  const pipeline: PipelineStage[] = [
    {
      $match: query,
    },
    ...(sortAggregation.length > 0 ? sortAggregation : []),
    {
      // Group on the target field to collect distinct values. When sorting by
      // a different property than the distinct field, carry that property
      // inside the group key so the following $sort stage can reference it.
      $group: {
        _id: {
          _field: `$${fieldPath}`,
          ...(sortProperty === fieldPath
            ? {}
            : {
                _sort: `$${sortProperty}`,
              }),
        },
      },
    },
    {
      $sort: {
        [sortProperty === fieldPath ? '_id._field' : '_id._sort']: sortDirection,
      },
    },
  ]
  // Executes the pipeline and maps each group back to `{ [field]: value }`.
  // The JSON round-trip converts BSON values to plain JSON-safe ones —
  // NOTE(review): presumably to normalize ObjectId/Date instances; confirm.
  // The closure captures `pipeline` by reference, so stages pushed below
  // ($skip/$limit) take effect as long as they are pushed before this runs.
  const getValues = async () => {
    return Model.aggregate(pipeline, { session }).then((res) =>
      res.map((each) => ({
        [args.field]: JSON.parse(JSON.stringify(each._id._field)),
      })),
    )
  }
  if (args.limit) {
    // Paginate by mutating the shared pipeline BEFORE getValues() is invoked.
    pipeline.push({
      $skip: (page - 1) * args.limit,
    })
    pipeline.push({ $limit: args.limit })
    // Count of ALL distinct values (unpaginated), via a separate aggregation.
    const totalDocs = await Model.aggregate(
      [
        {
          $match: query,
        },
        {
          $group: {
            _id: `$${fieldPath}`,
          },
        },
        { $count: 'count' },
      ],
      {
        session,
      },
    ).then((res) => res[0]?.count ?? 0)
    const totalPages = Math.ceil(totalDocs / args.limit)
    const hasPrevPage = page > 1
    const hasNextPage = totalPages > page
    // 1-based index of the first value on the current page.
    const pagingCounter = (page - 1) * args.limit + 1
    return {
      hasNextPage,
      hasPrevPage,
      limit: args.limit,
      nextPage: hasNextPage ? page + 1 : null,
      page,
      pagingCounter,
      prevPage: hasPrevPage ? page - 1 : null,
      totalDocs,
      totalPages,
      values: await getValues(),
    }
  }
  // No limit: return every distinct value as a single unpaginated result.
  const values = await getValues()
  return {
    hasNextPage: false,
    hasPrevPage: false,
    limit: 0,
    page: 1,
    pagingCounter: 1,
    totalDocs: values.length,
    totalPages: 1,
    values,
  }
}

View File

@@ -10,7 +10,6 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'
export const findOne: FindOne = async function findOne(
@@ -68,16 +67,6 @@ export const findOne: FindOne = async function findOne(
doc = await Model.findOne(query, {}, options)
}
if (doc && !this.useJoinAggregations) {
await resolveJoins({
adapter: this,
collectionSlug,
docs: [doc] as Record<string, unknown>[],
joins,
locale,
})
}
if (!doc) {
return null
}

View File

@@ -42,7 +42,6 @@ import { deleteOne } from './deleteOne.js'
import { deleteVersions } from './deleteVersions.js'
import { destroy } from './destroy.js'
import { find } from './find.js'
import { findDistinct } from './findDistinct.js'
import { findGlobal } from './findGlobal.js'
import { findGlobalVersions } from './findGlobalVersions.js'
import { findOne } from './findOne.js'
@@ -144,29 +143,6 @@ export interface Args {
/** The URL to connect to MongoDB or false to start payload and prevent connecting */
url: false | string
/**
* Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command.
* Payload only uses `dropDatabase` for testing purposes.
* @default false
*/
useAlternativeDropDatabase?: boolean
/**
* Set to `true` to use `BigInt` for custom ID fields of type `'number'`.
* Useful for databases that don't support `double` or `int32` IDs.
* @default false
*/
useBigIntForNumberIDs?: boolean
/**
* Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries.
* @default true
*/
useJoinAggregations?: boolean
/**
* Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting.
* @default true
*/
usePipelineInSortLookup?: boolean
}
export type MongooseAdapter = {
@@ -183,10 +159,6 @@ export type MongooseAdapter = {
up: (args: MigrateUpArgs) => Promise<void>
}[]
sessions: Record<number | string, ClientSession>
useAlternativeDropDatabase: boolean
useBigIntForNumberIDs: boolean
useJoinAggregations: boolean
usePipelineInSortLookup: boolean
versions: {
[slug: string]: CollectionModel
}
@@ -222,10 +194,6 @@ declare module 'payload' {
updateVersion: <T extends TypeWithID = TypeWithID>(
args: { options?: QueryOptions } & UpdateVersionArgs<T>,
) => Promise<TypeWithVersion<T>>
useAlternativeDropDatabase: boolean
useBigIntForNumberIDs: boolean
useJoinAggregations: boolean
usePipelineInSortLookup: boolean
versions: {
[slug: string]: CollectionModel
}
@@ -246,10 +214,6 @@ export function mongooseAdapter({
prodMigrations,
transactionOptions = {},
url,
useAlternativeDropDatabase = false,
useBigIntForNumberIDs = false,
useJoinAggregations = true,
usePipelineInSortLookup = true,
}: Args): DatabaseAdapterObj {
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(migrationDirArg)
@@ -298,7 +262,6 @@ export function mongooseAdapter({
destroy,
disableFallbackSort,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findOne,
@@ -316,10 +279,6 @@ export function mongooseAdapter({
updateOne,
updateVersion,
upsert,
useAlternativeDropDatabase,
useBigIntForNumberIDs,
useJoinAggregations,
usePipelineInSortLookup,
})
}
@@ -331,8 +290,6 @@ export function mongooseAdapter({
}
}
export { compatabilityOptions } from './utilities/compatabilityOptions.js'
/**
* Attempt to find migrations directory.
*

View File

@@ -143,12 +143,7 @@ export const buildSchema = (args: {
const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id')
if (idField) {
fields = {
_id:
idField.type === 'number'
? payload.db.useBigIntForNumberIDs
? mongoose.Schema.Types.BigInt
: Number
: String,
_id: idField.type === 'number' ? Number : String,
}
schemaFields = schemaFields.filter(
(field) => !(fieldAffectsData(field) && field.name === 'id'),
@@ -905,11 +900,7 @@ const getRelationshipValueType = (field: RelationshipField | UploadField, payloa
}
if (customIDType === 'number') {
if (payload.db.useBigIntForNumberIDs) {
return mongoose.Schema.Types.BigInt
} else {
return mongoose.Schema.Types.Number
}
return mongoose.Schema.Types.Number
}
return mongoose.Schema.Types.String

View File

@@ -99,57 +99,31 @@ const relationshipSort = ({
sortFieldPath = foreignFieldPath.localizedPath.replace('<locale>', locale)
}
const as = `__${relationshipPath.replace(/\./g, '__')}`
// If we have not already sorted on this relationship yet, we need to add a lookup stage
if (!sortAggregation.some((each) => '$lookup' in each && each.$lookup.as === as)) {
let localField = versions ? `version.${relationshipPath}` : relationshipPath
if (adapter.usePipelineInSortLookup) {
const flattenedField = `__${localField.replace(/\./g, '__')}_lookup`
sortAggregation.push({
$addFields: {
[flattenedField]: `$${localField}`,
},
})
localField = flattenedField
}
if (
!sortAggregation.some((each) => {
return '$lookup' in each && each.$lookup.as === `__${path}`
})
) {
sortAggregation.push({
$lookup: {
as,
as: `__${path}`,
foreignField: '_id',
from: foreignCollection.Model.collection.name,
localField,
...(!adapter.usePipelineInSortLookup && {
pipeline: [
{
$project: {
[sortFieldPath]: true,
},
localField: versions ? `version.${relationshipPath}` : relationshipPath,
pipeline: [
{
$project: {
[sortFieldPath]: true,
},
],
}),
},
],
},
})
if (adapter.usePipelineInSortLookup) {
sortAggregation.push({
$unset: localField,
})
}
}
sort[`__${path}.${sortFieldPath}`] = sortDirection
if (!adapter.usePipelineInSortLookup) {
const lookup = sortAggregation.find(
(each) => '$lookup' in each && each.$lookup.as === as,
) as PipelineStage.Lookup
const pipeline = lookup.$lookup.pipeline![0] as PipelineStage.Project
pipeline.$project[sortFieldPath] = true
return true
}
sort[`${as}.${sortFieldPath}`] = sortDirection
return true
}
}

View File

@@ -12,7 +12,6 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'
export const queryDrafts: QueryDrafts = async function queryDrafts(
@@ -159,17 +158,6 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
result = await Model.paginate(versionQuery, paginationOptions)
}
if (!this.useJoinAggregations) {
await resolveJoins({
adapter: this,
collectionSlug,
docs: result.docs as Record<string, unknown>[],
joins,
locale,
versions: true,
})
}
transform({
adapter: this,
data: result.docs,

View File

@@ -1,4 +1,4 @@
import type { MongooseUpdateQueryOptions, UpdateQuery } from 'mongoose'
import type { MongooseUpdateQueryOptions } from 'mongoose'
import type { UpdateOne } from 'payload'
import type { MongooseAdapter } from './index.js'
@@ -50,20 +50,15 @@ export const updateOne: UpdateOne = async function updateOne(
let result
const $inc: Record<string, number> = {}
let updateData: UpdateQuery<any> = data
transform({ $inc, adapter: this, data, fields, operation: 'write' })
if (Object.keys($inc).length) {
updateData = { $inc, $set: updateData }
}
transform({ adapter: this, data, fields, operation: 'write' })
try {
if (returning === false) {
await Model.updateOne(query, updateData, options)
await Model.updateOne(query, data, options)
transform({ adapter: this, data, fields, operation: 'read' })
return null
} else {
result = await Model.findOneAndUpdate(query, updateData, options)
result = await Model.findOneAndUpdate(query, data, options)
}
} catch (error) {
handleError({ collection: collectionSlug, error, req })

View File

@@ -76,11 +76,7 @@ export const aggregatePaginate = async ({
countPromise = Model.estimatedDocumentCount(query)
} else {
const hint = adapter.disableIndexHints !== true ? { _id: 1 } : undefined
countPromise = Model.countDocuments(query, {
collation,
session,
...(hint ? { hint } : {}),
})
countPromise = Model.countDocuments(query, { collation, hint, session })
}
}

View File

@@ -44,9 +44,6 @@ export const buildJoinAggregation = async ({
projection,
versions,
}: BuildJoinAggregationArgs): Promise<PipelineStage[] | undefined> => {
if (!adapter.useJoinAggregations) {
return
}
if (
(Object.keys(collectionConfig.joins).length === 0 &&
collectionConfig.polymorphicJoins.length == 0) ||

View File

@@ -1,25 +0,0 @@
import type { Args } from '../index.js'
/**
 * Each key is a mongo-compatible database and the value holds the
 * recommended `mongooseAdapter` settings for compatibility.
 *
 * Spread the relevant entry into the adapter options, e.g.
 * `mongooseAdapter({ url, ...compatabilityOptions.firestore })`.
 */
export const compatabilityOptions = {
  cosmosdb: {
    // Cosmos DB does not support transactions that update documents across
    // multiple collections — a common case in Payload (via hooks).
    transactionOptions: false,
    // Join aggregations rely on correlated subqueries, which are limited on
    // these implementations; fall back to multiple `find` queries instead.
    useJoinAggregations: false,
    usePipelineInSortLookup: false,
  },
  documentdb: {
    // Disable hinting the 'id' index on counts, which can cause problems
    // with DocumentDB.
    disableIndexHints: true,
  },
  firestore: {
    disableIndexHints: true,
    ensureIndexes: false,
    transactionOptions: false,
    // Firestore doesn't support the raw `dropDatabase` command; use the
    // `collection.deleteMany({})`-per-collection fallback (tests only).
    useAlternativeDropDatabase: true,
    // Use `BigInt` for custom number IDs — for databases that don't support
    // `double` or `int32` IDs.
    useBigIntForNumberIDs: true,
    useJoinAggregations: false,
    usePipelineInSortLookup: false,
  },
} satisfies Record<string, Partial<Args>>

View File

@@ -1,647 +0,0 @@
import type { JoinQuery, SanitizedJoins, Where } from 'payload'
import {
appendVersionToQueryKey,
buildVersionCollectionFields,
combineQueries,
getQueryDraftsSort,
} from 'payload'
import { fieldShouldBeLocalized } from 'payload/shared'
import type { MongooseAdapter } from '../index.js'
import { buildQuery } from '../queries/buildQuery.js'
import { buildSortParam } from '../queries/buildSortParam.js'
import { transform } from './transform.js'
export type ResolveJoinsArgs = {
/** The MongoDB adapter instance */
adapter: MongooseAdapter
/** The slug of the collection being queried */
collectionSlug: string
/** Array of documents to resolve joins for */
docs: Record<string, unknown>[]
/** Join query specifications (which joins to resolve and how) */
joins?: JoinQuery
/** Optional locale for localized queries */
locale?: string
/** Optional projection for the join query */
projection?: Record<string, true>
/** Whether to resolve versions instead of published documents */
versions?: boolean
}
/**
 * Resolves join relationships for a collection of documents.
 * This function fetches related documents based on join configurations and
 * attaches them to the original documents with pagination support.
 *
 * Mutates each entry of `docs` in place: the resolved value written at a join
 * path has the shape `{ docs, hasNextPage, totalDocs? }` (nested under
 * `version.` when `versions` is true).
 */
export async function resolveJoins({
  adapter,
  collectionSlug,
  docs,
  joins,
  locale,
  projection,
  versions = false,
}: ResolveJoinsArgs): Promise<void> {
  // Early return if no joins are specified or no documents to process
  if (!joins || docs.length === 0) {
    return
  }
  // Get the collection configuration from the adapter
  const collectionConfig = adapter.payload.collections[collectionSlug]?.config
  if (!collectionConfig) {
    return
  }
  // Build a map of join paths to their configurations for quick lookup
  // This flattens the nested join structure into a single map keyed by join path
  const joinMap: Record<string, { targetCollection: string } & SanitizedJoin> = {}
  // Add regular joins
  for (const [target, joinList] of Object.entries(collectionConfig.joins)) {
    for (const join of joinList) {
      joinMap[join.joinPath] = { ...join, targetCollection: target }
    }
  }
  // Add polymorphic joins
  for (const join of collectionConfig.polymorphicJoins || []) {
    // For polymorphic joins, we use the collections array as the target
    joinMap[join.joinPath] = { ...join, targetCollection: join.field.collection as string }
  }
  // Process each requested join concurrently
  const joinPromises = Object.entries(joins).map(async ([joinPath, joinQuery]) => {
    if (!joinQuery) {
      return null
    }
    // If a projection is provided, and the join path is not in the projection, skip it
    if (projection && !projection[joinPath]) {
      return null
    }
    // Get the join definition from our map
    const joinDef = joinMap[joinPath]
    if (!joinDef) {
      return null
    }
    // Normalize collections to always be an array for unified processing
    const allCollections = Array.isArray(joinDef.field.collection)
      ? joinDef.field.collection
      : [joinDef.field.collection]
    // Use the provided locale or fall back to the default locale for localized fields
    const localizationConfig = adapter.payload.config.localization
    const effectiveLocale =
      locale ||
      (typeof localizationConfig === 'object' &&
        localizationConfig &&
        localizationConfig.defaultLocale)
    // Extract relationTo filter from the where clause to determine which collections to query
    const relationToFilter = extractRelationToFilter(joinQuery.where || {})
    // Determine which collections to query based on relationTo filter
    const collections = relationToFilter
      ? allCollections.filter((col) => relationToFilter.includes(col))
      : allCollections
    // Check if this is a polymorphic collection join (where field.collection is an array)
    const isPolymorphicJoin = Array.isArray(joinDef.field.collection)
    // Apply pagination settings
    const limit = joinQuery.limit ?? joinDef.field.defaultLimit ?? 10
    const page = joinQuery.page ?? 1
    const skip = (page - 1) * limit
    // Process collections concurrently
    const collectionPromises = collections.map(async (joinCollectionSlug) => {
      const targetConfig = adapter.payload.collections[joinCollectionSlug]?.config
      if (!targetConfig) {
        return null
      }
      // Query the versions model instead of the collection model when resolving drafts
      const useDrafts = versions && Boolean(targetConfig.versions?.drafts)
      let JoinModel
      if (useDrafts) {
        JoinModel = adapter.versions[targetConfig.slug]
      } else {
        JoinModel = adapter.collections[targetConfig.slug]
      }
      if (!JoinModel) {
        return null
      }
      // Extract all parent document IDs to use in the join query
      const parentIDs = docs.map((d) => (versions ? (d.parent ?? d._id ?? d.id) : (d._id ?? d.id)))
      // Build the base query
      let whereQuery: null | Record<string, unknown> = null
      whereQuery = isPolymorphicJoin
        ? filterWhereForCollection(
            joinQuery.where || {},
            targetConfig.flattenedFields,
            true, // exclude relationTo for individual collections
          )
        : joinQuery.where || {}
      // Skip this collection if the WHERE clause cannot be satisfied for polymorphic collection joins
      if (whereQuery === null) {
        return null
      }
      // Drafts are filtered to the latest version; otherwise build a plain query
      whereQuery = useDrafts
        ? await JoinModel.buildQuery({
            locale,
            payload: adapter.payload,
            where: combineQueries(appendVersionToQueryKey(whereQuery as Where), {
              latest: {
                equals: true,
              },
            }),
          })
        : await buildQuery({
            adapter,
            collectionSlug: joinCollectionSlug,
            fields: targetConfig.flattenedFields,
            locale,
            where: whereQuery as Where,
          })
      // Handle localized paths and version prefixes
      let dbFieldName = joinDef.field.on
      if (effectiveLocale && typeof localizationConfig === 'object' && localizationConfig) {
        const pathSegments = joinDef.field.on.split('.')
        const transformedSegments: string[] = []
        const fields = useDrafts
          ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
          : targetConfig.flattenedFields
        for (let i = 0; i < pathSegments.length; i++) {
          const segment = pathSegments[i]!
          transformedSegments.push(segment)
          // Check if this segment corresponds to a localized field
          const fieldAtSegment = fields.find((f) => f.name === segment)
          if (fieldAtSegment && fieldAtSegment.localized) {
            // Localized fields are stored keyed by locale, e.g. `title.en`
            transformedSegments.push(effectiveLocale)
          }
        }
        dbFieldName = transformedSegments.join('.')
      }
      // Add version prefix for draft queries
      if (useDrafts) {
        dbFieldName = `version.${dbFieldName}`
      }
      // Check if the target field is a polymorphic relationship
      const isPolymorphic = joinDef.targetField
        ? Array.isArray(joinDef.targetField.relationTo)
        : false
      if (isPolymorphic) {
        // For polymorphic relationships, we need to match both relationTo and value
        whereQuery[`${dbFieldName}.relationTo`] = collectionSlug
        whereQuery[`${dbFieldName}.value`] = { $in: parentIDs }
      } else {
        // For regular relationships and polymorphic collection joins
        whereQuery[dbFieldName] = { $in: parentIDs }
      }
      // Build the sort parameters for the query
      const fields = useDrafts
        ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
        : targetConfig.flattenedFields
      const sort = buildSortParam({
        adapter,
        config: adapter.payload.config,
        fields,
        locale,
        sort: useDrafts
          ? getQueryDraftsSort({
              collectionConfig: targetConfig,
              sort: joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
            })
          : joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
        timestamps: true,
      })
      // NOTE: this local shadows the outer `projection` argument; here it is the
      // Mongo projection for the join query (id, join field, parent, sort fields)
      const projection = buildJoinProjection(dbFieldName, useDrafts, sort)
      // Polymorphic collection joins fetch everything and paginate in memory
      // (see the slice logic further below), so limit/skip are omitted for them
      const [results, dbCount] = await Promise.all([
        JoinModel.find(whereQuery, projection, {
          sort,
          ...(isPolymorphicJoin ? {} : { limit, skip }),
        }).lean(),
        isPolymorphicJoin ? Promise.resolve(0) : JoinModel.countDocuments(whereQuery),
      ])
      const count = isPolymorphicJoin ? results.length : dbCount
      transform({
        adapter,
        data: results,
        fields: useDrafts
          ? buildVersionCollectionFields(adapter.payload.config, targetConfig, false)
          : targetConfig.fields,
        operation: 'read',
      })
      // Return results with collection info for grouping
      return {
        collectionSlug: joinCollectionSlug,
        count,
        dbFieldName,
        results,
        sort,
        useDrafts,
      }
    })
    const collectionResults = await Promise.all(collectionPromises)
    // Group the results by parent ID
    const grouped: Record<
      string,
      {
        docs: Record<string, unknown>[]
        sort: Record<string, string>
      }
    > = {}
    let totalCount = 0
    for (const collectionResult of collectionResults) {
      if (!collectionResult) {
        continue
      }
      const { collectionSlug, count, dbFieldName, results, sort, useDrafts } = collectionResult
      totalCount += count
      for (const result of results) {
        if (useDrafts) {
          // Version docs carry a `parent` reference; surface it as the doc ID
          result.id = result.parent
        }
        const parentValues = getByPathWithArrays(result, dbFieldName) as (
          | { relationTo: string; value: number | string }
          | number
          | string
        )[]
        if (parentValues.length === 0) {
          continue
        }
        for (let parentValue of parentValues) {
          if (!parentValue) {
            continue
          }
          if (typeof parentValue === 'object') {
            // Polymorphic relationship values are stored as { relationTo, value }
            parentValue = parentValue.value
          }
          const joinData = {
            relationTo: collectionSlug,
            value: result.id,
          }
          const parentKey = parentValue as string
          if (!grouped[parentKey]) {
            grouped[parentKey] = {
              docs: [],
              sort,
            }
          }
          // Always store the ObjectID reference in polymorphic format
          grouped[parentKey].docs.push({
            ...result,
            __joinData: joinData,
          })
        }
      }
    }
    // Re-sort each parent's merged docs in memory, since results from multiple
    // collections were fetched independently
    for (const results of Object.values(grouped)) {
      results.docs.sort((a, b) => {
        for (const [fieldName, sortOrder] of Object.entries(results.sort)) {
          const sort = sortOrder === 'asc' ? 1 : -1
          const aValue = a[fieldName] as Date | number | string
          const bValue = b[fieldName] as Date | number | string
          if (aValue < bValue) {
            return -1 * sort
          }
          if (aValue > bValue) {
            return 1 * sort
          }
        }
        return 0
      })
      results.docs = results.docs.map(
        (doc) => (isPolymorphicJoin ? doc.__joinData : doc.id) as Record<string, unknown>,
      )
    }
    // Determine if the join field should be localized
    const localeSuffix =
      fieldShouldBeLocalized({
        field: joinDef.field,
        parentIsLocalized: joinDef.parentIsLocalized,
      }) &&
      adapter.payload.config.localization &&
      effectiveLocale
        ? `.${effectiveLocale}`
        : ''
    // Adjust the join path with locale suffix if needed
    const localizedJoinPath = `${joinPath}${localeSuffix}`
    return {
      grouped,
      isPolymorphicJoin,
      joinQuery,
      limit,
      localizedJoinPath,
      page,
      skip,
      totalCount,
    }
  })
  // Wait for all join operations to complete
  const joinResults = await Promise.all(joinPromises)
  // Process the results and attach them to documents
  for (const joinResult of joinResults) {
    if (!joinResult) {
      continue
    }
    const { grouped, isPolymorphicJoin, joinQuery, limit, localizedJoinPath, skip, totalCount } =
      joinResult
    // Attach the joined data to each parent document
    for (const doc of docs) {
      const id = (versions ? (doc.parent ?? doc._id ?? doc.id) : (doc._id ?? doc.id)) as string
      const all = grouped[id]?.docs || []
      // Calculate the slice for pagination
      // When limit is 0, it means unlimited - return all results
      const slice = isPolymorphicJoin
        ? limit === 0
          ? all
          : all.slice(skip, skip + limit)
        : // For non-polymorphic joins, we assume that page and limit were applied at the database level
          all
      // Create the join result object with pagination metadata
      // NOTE(review): totalCount is the total across ALL parents, so hasNextPage
      // here is an upper-bound signal per parent — confirm this is intended
      const value: Record<string, unknown> = {
        docs: slice,
        hasNextPage: limit === 0 ? false : totalCount > skip + slice.length,
      }
      // Include total count if requested
      if (joinQuery.count) {
        value.totalDocs = totalCount
      }
      // Navigate to the correct nested location in the document and set the join data
      // This handles nested join paths like "user.posts" by creating intermediate objects
      const segments = localizedJoinPath.split('.')
      let ref: Record<string, unknown>
      if (versions) {
        if (!doc.version) {
          doc.version = {}
        }
        ref = doc.version as Record<string, unknown>
      } else {
        ref = doc
      }
      for (let i = 0; i < segments.length - 1; i++) {
        const seg = segments[i]!
        if (!ref[seg]) {
          ref[seg] = {}
        }
        ref = ref[seg] as Record<string, unknown>
      }
      // Set the final join data at the target path
      ref[segments[segments.length - 1]!] = value
    }
  }
}
/**
 * Extracts relationTo filter values from a WHERE clause.
 *
 * Checks for a top-level `relationTo` condition (`in` or `equals`) first, then
 * recursively searches `and`/`or` branches, returning the first match found.
 *
 * @param where - The WHERE clause to search
 * @returns Array of collection slugs if relationTo filter found, null otherwise
 */
function extractRelationToFilter(where: Record<string, unknown>): null | string[] {
  if (!where || typeof where !== 'object') {
    return null
  }

  // Direct relationTo condition on this level
  const relationTo = where.relationTo
  if (relationTo && typeof relationTo === 'object') {
    const condition = relationTo as Record<string, unknown>
    if (condition.in && Array.isArray(condition.in)) {
      return condition.in as string[]
    }
    if (condition.equals) {
      return [condition.equals as string]
    }
  }

  // Recurse into logical operators, `and` before `or`, first hit wins
  for (const operator of ['and', 'or'] as const) {
    const branches = where[operator]
    if (Array.isArray(branches)) {
      for (const branch of branches) {
        const found = extractRelationToFilter(branch)
        if (found) {
          return found
        }
      }
    }
  }

  return null
}
/**
 * Filters a WHERE clause to only include fields that exist in the target collection.
 * This is needed for polymorphic joins where different collections have different fields.
 *
 * @param where - The original WHERE clause
 * @param availableFields - The fields available in the target collection
 * @param excludeRelationTo - Whether to exclude relationTo field (for individual collections)
 * @returns A filtered WHERE clause, or null if the query cannot match this collection
 */
function filterWhereForCollection(
  where: Record<string, unknown>,
  availableFields: Array<{ name: string }>,
  excludeRelationTo: boolean = false,
): null | Record<string, unknown> {
  if (!where || typeof where !== 'object') {
    return where
  }

  const knownFields = new Set(availableFields.map((field) => field.name))
  // relationTo is a special field available in polymorphic relationships
  if (!excludeRelationTo) {
    knownFields.add('relationTo')
  }

  const result: Record<string, unknown> = {}

  for (const [key, value] of Object.entries(where)) {
    if (key === 'and') {
      // AND: every branch must remain satisfiable
      if (!Array.isArray(value)) {
        continue
      }
      const kept: Record<string, unknown>[] = []
      for (const branch of value) {
        const sub = filterWhereForCollection(branch, availableFields, excludeRelationTo)
        if (sub === null) {
          // A single unsatisfiable branch makes the whole AND unsatisfiable
          return null
        }
        if (Object.keys(sub).length > 0) {
          kept.push(sub)
        }
      }
      if (kept.length > 0) {
        result[key] = kept
      }
    } else if (key === 'or') {
      // OR: keep only the satisfiable, non-empty branches; an OR where every
      // branch drops out is simply omitted (OR is more permissive)
      if (!Array.isArray(value)) {
        continue
      }
      const kept = value
        .map((branch) => filterWhereForCollection(branch, availableFields, excludeRelationTo))
        .filter(
          (branch): branch is Record<string, unknown> =>
            branch !== null && Object.keys(branch).length > 0,
        )
      if (kept.length > 0) {
        result[key] = kept
      }
    } else if (key === 'relationTo' && excludeRelationTo) {
      // Skip relationTo field for non-polymorphic collections
      continue
    } else if (knownFields.has(key)) {
      // The field exists in this collection; keep the condition as-is
      result[key] = value
    } else {
      // Unknown field: this collection can never match the query
      return null
    }
  }

  return result
}
/** A single join definition taken from a collection's sanitized joins map */
type SanitizedJoin = SanitizedJoins[string][number]
/**
 * Builds the MongoDB projection for a join query.
 *
 * Always selects `_id` and the join field itself; additionally selects
 * `parent` for draft (version) queries and every field referenced by the sort.
 */
function buildJoinProjection(
  baseFieldName: string,
  useDrafts: boolean,
  sort: Record<string, string>,
): Record<string, 1> {
  const projection: Record<string, 1> = { _id: 1, [baseFieldName]: 1 }

  if (useDrafts) {
    projection.parent = 1
  }

  // Sort fields must be projected so the in-memory re-sort can read them
  Object.keys(sort).forEach((sortField) => {
    projection[sortField] = 1
  })

  return projection
}
/**
 * Enhanced utility function to safely traverse nested object properties using dot notation.
 * Handles arrays by searching through array elements for matching values.
 *
 * @param doc - The document to traverse
 * @param path - Dot-separated path (e.g., "array.category")
 * @returns Array of values found at the specified path (for arrays) or single value wrapped in an array
 */
function getByPathWithArrays(doc: unknown, path: string): unknown[] {
  const segments = path.split('.')
  let node: unknown = doc

  for (let index = 0; index < segments.length; index++) {
    if (node == null) {
      return []
    }

    const key = segments[index]!
    const value = (node as Record<string, unknown>)[key]
    if (value == null) {
      return []
    }

    // Last segment: return the value(s) found here
    if (index === segments.length - 1) {
      return Array.isArray(value) ? value : [value]
    }

    // Intermediate array: recurse into each element with the remaining path
    if (Array.isArray(value)) {
      const restPath = segments.slice(index + 1).join('.')
      const collected: unknown[] = []
      for (const element of value) {
        if (element && typeof element === 'object') {
          collected.push(...getByPathWithArrays(element, restPath))
        }
      }
      return collected
    }

    // Plain object: descend one level
    node = value
  }

  return []
}

View File

@@ -208,7 +208,6 @@ const sanitizeDate = ({
}
type Args = {
$inc?: Record<string, number>
/** instance of the adapter */
adapter: MongooseAdapter
/** data to transform, can be an array of documents or a single document */
@@ -397,7 +396,6 @@ const stripFields = ({
}
export const transform = ({
$inc,
adapter,
data,
fields,
@@ -408,7 +406,7 @@ export const transform = ({
}: Args) => {
if (Array.isArray(data)) {
for (const item of data) {
transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships })
transform({ adapter, data: item, fields, globalSlug, operation, validateRelationships })
}
return
}
@@ -426,11 +424,6 @@ export const transform = ({
data.id = data.id.toHexString()
}
// Handle BigInt conversion for custom ID fields of type 'number'
if (adapter.useBigIntForNumberIDs && typeof data.id === 'bigint') {
data.id = Number(data.id)
}
if (!adapter.allowAdditionalKeys) {
stripFields({
config,
@@ -445,27 +438,13 @@ export const transform = ({
data.globalType = globalSlug
}
const sanitize: TraverseFieldsCallback = ({ field, parentPath, ref: incomingRef }) => {
const sanitize: TraverseFieldsCallback = ({ field, ref: incomingRef }) => {
if (!incomingRef || typeof incomingRef !== 'object') {
return
}
const ref = incomingRef as Record<string, unknown>
if (
$inc &&
field.type === 'number' &&
operation === 'write' &&
field.name in ref &&
ref[field.name]
) {
const value = ref[field.name]
if (value && typeof value === 'object' && '$inc' in value && typeof value.$inc === 'number') {
$inc[`${parentPath}${field.name}`] = value.$inc
delete ref[field.name]
}
}
if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) {
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
const fieldRef = ref[field.name] as Record<string, unknown>

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "3.48.0",
"version": "3.47.0",
"description": "The officially supported Postgres database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -17,7 +17,6 @@ import {
deleteVersions,
destroy,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findMigrationDir,
@@ -121,7 +120,6 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
json: true,
},
fieldConstraints: {},
findDistinct,
generateSchema: createSchemaGenerator({
columnToCodeConverter,
corePackageSuffix: 'pg-core',

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-sqlite",
"version": "3.48.0",
"version": "3.47.0",
"description": "The officially supported SQLite database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -6,13 +6,13 @@ import type { CountDistinct, SQLiteAdapter } from './types.js'
export const countDistinct: CountDistinct = async function countDistinct(
this: SQLiteAdapter,
{ column, db, joins, tableName, where },
{ db, joins, tableName, where },
) {
// When we don't have any joins - use a simple COUNT(*) query.
if (joins.length === 0) {
const countResult = await db
.select({
count: column ? count(sql`DISTINCT ${column}`) : count(),
count: count(),
})
.from(this.tables[tableName])
.where(where)
@@ -25,7 +25,7 @@ export const countDistinct: CountDistinct = async function countDistinct(
})
.from(this.tables[tableName])
.where(where)
.groupBy(column ?? this.tables[tableName].id)
.groupBy(this.tables[tableName].id)
.limit(1)
.$dynamic()

View File

@@ -18,7 +18,6 @@ import {
deleteVersions,
destroy,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findMigrationDir,
@@ -102,7 +101,6 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
json: true,
},
fieldConstraints: {},
findDistinct,
generateSchema: createSchemaGenerator({
columnToCodeConverter,
corePackageSuffix: 'sqlite-core',

View File

@@ -5,7 +5,6 @@ import type { DrizzleConfig, Relation, Relations, SQL } from 'drizzle-orm'
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type {
AnySQLiteColumn,
SQLiteColumn,
SQLiteInsertOnConflictDoUpdateConfig,
SQLiteTableWithColumns,
SQLiteTransactionConfig,
@@ -88,7 +87,6 @@ export type GenericTable = SQLiteTableWithColumns<{
export type GenericRelation = Relations<string, Record<string, Relation<string>>>
export type CountDistinct = (args: {
column?: SQLiteColumn<any>
db: LibSQLDatabase
joins: BuildQueryJoinAliases
tableName: string

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-vercel-postgres",
"version": "3.48.0",
"version": "3.47.0",
"description": "Vercel Postgres adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -18,7 +18,6 @@ import {
deleteVersions,
destroy,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findMigrationDir,
@@ -175,7 +174,6 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
dropDatabase,
execute,
find,
findDistinct,
findGlobal,
findGlobalVersions,
readReplicaOptions: args.readReplicas,

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/drizzle",
"version": "3.48.0",
"version": "3.47.0",
"description": "A library of shared functions used by different payload database adapters",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,108 +0,0 @@
import type { FindDistinct, SanitizedCollectionConfig } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter, GenericColumn } from './types.js'
import { buildQuery } from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { getTransaction } from './utilities/getTransaction.js'
import { DistinctSymbol } from './utilities/rawConstraint.js'
export const findDistinct: FindDistinct = async function (this: DrizzleAdapter, args) {
const db = await getTransaction(this, args.req)
const collectionConfig: SanitizedCollectionConfig =
this.payload.collections[args.collection].config
const page = args.page || 1
const offset = args.limit ? (page - 1) * args.limit : undefined
const tableName = this.tableNameMap.get(toSnakeCase(collectionConfig.slug))
const { joins, orderBy, selectFields, where } = buildQuery({
adapter: this,
fields: collectionConfig.flattenedFields,
locale: args.locale,
sort: args.sort ?? args.field,
tableName,
where: {
and: [
args.where ?? {},
{
[args.field]: {
equals: DistinctSymbol,
},
},
],
},
})
orderBy.pop()
const selectDistinctResult = await selectDistinct({
adapter: this,
db,
forceRun: true,
joins,
query: ({ query }) => {
query = query.orderBy(() => orderBy.map(({ column, order }) => order(column)))
if (args.limit) {
if (offset) {
query = query.offset(offset)
}
query = query.limit(args.limit)
}
return query
},
selectFields: {
_selected: selectFields['_selected'],
...(orderBy[0].column === selectFields['_selected'] ? {} : { _order: orderBy[0].column }),
} as Record<string, GenericColumn>,
tableName,
where,
})
const values = selectDistinctResult.map((each) => ({
[args.field]: (each as Record<string, any>)._selected,
}))
if (args.limit) {
const totalDocs = await this.countDistinct({
column: selectFields['_selected'],
db,
joins,
tableName,
where,
})
const totalPages = Math.ceil(totalDocs / args.limit)
const hasPrevPage = page > 1
const hasNextPage = totalPages > page
const pagingCounter = (page - 1) * args.limit + 1
return {
hasNextPage,
hasPrevPage,
limit: args.limit,
nextPage: hasNextPage ? page + 1 : null,
page,
pagingCounter,
prevPage: hasPrevPage ? page - 1 : null,
totalDocs,
totalPages,
values,
}
}
return {
hasNextPage: false,
hasPrevPage: false,
limit: 0,
page: 1,
pagingCounter: 1,
totalDocs: values.length,
totalPages: 1,
values,
}
}

View File

@@ -12,7 +12,6 @@ export { deleteVersions } from './deleteVersions.js'
export { destroy } from './destroy.js'
export { find } from './find.js'
export { chainMethods } from './find/chainMethods.js'
export { findDistinct } from './findDistinct.js'
export { findGlobal } from './findGlobal.js'
export { findGlobalVersions } from './findGlobalVersions.js'
export { findMigrationDir } from './findMigrationDir.js'

View File

@@ -6,13 +6,13 @@ import type { BasePostgresAdapter, CountDistinct } from './types.js'
export const countDistinct: CountDistinct = async function countDistinct(
this: BasePostgresAdapter,
{ column, db, joins, tableName, where },
{ db, joins, tableName, where },
) {
// When we don't have any joins - use a simple COUNT(*) query.
if (joins.length === 0) {
const countResult = await db
.select({
count: column ? count(sql`DISTINCT ${column}`) : count(),
count: count(),
})
.from(this.tables[tableName])
.where(where)
@@ -26,7 +26,7 @@ export const countDistinct: CountDistinct = async function countDistinct(
})
.from(this.tables[tableName])
.where(where)
.groupBy(column || this.tables[tableName].id)
.groupBy(this.tables[tableName].id)
.limit(1)
.$dynamic()

View File

@@ -20,7 +20,6 @@ import type {
UniqueConstraintBuilder,
} from 'drizzle-orm/pg-core'
import type { PgTableFn } from 'drizzle-orm/pg-core/table'
import type { SQLiteColumn } from 'drizzle-orm/sqlite-core'
import type { Payload, PayloadRequest } from 'payload'
import type { ClientConfig, QueryResult } from 'pg'
@@ -65,7 +64,6 @@ export type GenericRelation = Relations<string, Record<string, Relation<string>>
export type PostgresDB = NodePgDatabase<Record<string, unknown>>
export type CountDistinct = (args: {
column?: PgColumn<any> | SQLiteColumn<any>
db: PostgresDB | TransactionPg
joins: BuildQueryJoinAliases
tableName: string

View File

@@ -10,7 +10,6 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js'
import type { BuildQueryJoinAliases } from './buildQuery.js'
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
import { DistinctSymbol } from '../utilities/rawConstraint.js'
import { buildAndOrConditions } from './buildAndOrConditions.js'
import { getTableColumnFromPath } from './getTableColumnFromPath.js'
import { sanitizeQueryValue } from './sanitizeQueryValue.js'
@@ -109,17 +108,6 @@ export function parseParams({
value: val,
})
const resolvedColumn =
rawColumn ||
(aliasTable && tableName === getNameFromDrizzleTable(table)
? aliasTable[columnName]
: table[columnName])
if (val === DistinctSymbol) {
selectFields['_selected'] = resolvedColumn
break
}
queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
if (typeof value === 'string' && value.indexOf('%') > -1) {
constraints.push(adapter.operators.like(constraintTable[col], value))
@@ -293,6 +281,12 @@ export function parseParams({
break
}
const resolvedColumn =
rawColumn ||
(aliasTable && tableName === getNameFromDrizzleTable(table)
? aliasTable[columnName]
: table[columnName])
if (queryOperator === 'not_equals' && queryValue !== null) {
constraints.push(
or(

View File

@@ -14,7 +14,6 @@ import type { BuildQueryJoinAliases } from './buildQuery.js'
type Args = {
adapter: DrizzleAdapter
db: DrizzleAdapter['drizzle'] | DrizzleTransaction
forceRun?: boolean
joins: BuildQueryJoinAliases
query?: (args: { query: SQLiteSelect }) => SQLiteSelect
selectFields: Record<string, GenericColumn>
@@ -28,14 +27,13 @@ type Args = {
export const selectDistinct = ({
adapter,
db,
forceRun,
joins,
query: queryModifier = ({ query }) => query,
selectFields,
tableName,
where,
}: Args): QueryPromise<{ id: number | string }[] & Record<string, GenericColumn>> => {
if (forceRun || Object.keys(joins).length > 0) {
if (Object.keys(joins).length > 0) {
let query: SQLiteSelect
const table = adapter.tables[tableName]

View File

@@ -8,7 +8,6 @@ import { traverseFields } from './traverseFields.js'
type Args = {
adapter: DrizzleAdapter
data: Record<string, unknown>
enableAtomicWrites?: boolean
fields: FlattenedField[]
parentIsLocalized?: boolean
path?: string
@@ -18,7 +17,6 @@ type Args = {
export const transformForWrite = ({
adapter,
data,
enableAtomicWrites,
fields,
parentIsLocalized,
path = '',
@@ -50,7 +48,6 @@ export const transformForWrite = ({
blocksToDelete: rowToInsert.blocksToDelete,
columnPrefix: '',
data,
enableAtomicWrites,
fieldPrefix: '',
fields,
locales: rowToInsert.locales,

View File

@@ -1,5 +1,6 @@
import type { FlattenedField } from 'payload'
import { sql } from 'drizzle-orm'
import { APIError, type FlattenedField } from 'payload'
import { fieldIsVirtual, fieldShouldBeLocalized } from 'payload/shared'
import toSnakeCase from 'to-snake-case'
@@ -40,7 +41,6 @@ type Args = {
*/
columnPrefix: string
data: Record<string, unknown>
enableAtomicWrites?: boolean
existingLocales?: Record<string, unknown>[]
/**
* A prefix that will retain camel-case formatting, representing prior fields
@@ -87,7 +87,6 @@ export const traverseFields = ({
blocksToDelete,
columnPrefix,
data,
enableAtomicWrites,
existingLocales,
fieldPrefix,
fields,
@@ -269,7 +268,6 @@ export const traverseFields = ({
blocksToDelete,
columnPrefix: `${columnName}_`,
data: localeData as Record<string, unknown>,
enableAtomicWrites,
existingLocales,
fieldPrefix: `${fieldName}_`,
fields: field.flattenedFields,
@@ -555,22 +553,6 @@ export const traverseFields = ({
formattedValue = JSON.stringify(value)
}
if (
field.type === 'number' &&
value &&
typeof value === 'object' &&
'$inc' in value &&
typeof value.$inc === 'number'
) {
if (!enableAtomicWrites) {
throw new APIError(
'The passed data must not contain any nested fields for atomic writes',
)
}
formattedValue = sql.raw(`${columnName} + ${value.$inc}`)
}
if (field.type === 'date') {
if (typeof value === 'number' && !Number.isNaN(value)) {
formattedValue = new Date(value).toISOString()

View File

@@ -89,7 +89,6 @@ export type TransactionPg = PgTransaction<
export type DrizzleTransaction = TransactionPg | TransactionSQLite
export type CountDistinct = (args: {
column?: PgColumn<any> | SQLiteColumn<any>
db: DrizzleTransaction | LibSQLDatabase | PostgresDB
joins: BuildQueryJoinAliases
tableName: string

View File

@@ -1,15 +1,67 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type { UpdateOne } from 'payload'
import type { FlattenedField, UpdateOne } from 'payload'
import { eq } from 'drizzle-orm'
import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from './types.js'
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
import { buildQuery } from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { transform } from './transform/read/index.js'
import { transformForWrite } from './transform/write/index.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'
/**
* Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call.
* We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships.
*/
const shouldUseUpsertRow = ({
data,
fields,
}: {
data: Record<string, unknown>
fields: FlattenedField[]
}) => {
for (const key in data) {
const value = data[key]
const field = fields.find((each) => each.name === key)
if (!field) {
continue
}
if (
field.type === 'array' ||
field.type === 'blocks' ||
((field.type === 'text' ||
field.type === 'relationship' ||
field.type === 'upload' ||
field.type === 'select' ||
field.type === 'number') &&
field.hasMany) ||
((field.type === 'relationship' || field.type === 'upload') &&
Array.isArray(field.relationTo)) ||
field.localized
) {
return true
}
if (
(field.type === 'group' || field.type === 'tab') &&
value &&
typeof value === 'object' &&
shouldUseUpsertRow({ data: value as Record<string, unknown>, fields: field.flattenedFields })
) {
return true
}
}
return false
}
export const updateOne: UpdateOne = async function updateOne(
this: DrizzleAdapter,
{
@@ -74,23 +126,71 @@ export const updateOne: UpdateOne = async function updateOne(
return null
}
const result = await upsertRow({
id: idToUpdate,
if (!idToUpdate || shouldUseUpsertRow({ data, fields: collection.flattenedFields })) {
const result = await upsertRow({
id: idToUpdate,
adapter: this,
data,
db,
fields: collection.flattenedFields,
ignoreResult: returning === false,
joinQuery,
operation: 'update',
req,
select,
tableName,
})
if (returning === false) {
return null
}
return result
}
const { row } = transformForWrite({
adapter: this,
data,
db,
fields: collection.flattenedFields,
ignoreResult: returning === false,
joinQuery,
operation: 'update',
req,
select,
tableName,
})
const drizzle = db as LibSQLDatabase
await drizzle
.update(this.tables[tableName])
.set(row)
// TODO: we can skip fetching idToUpdate here with using the incoming where
.where(eq(this.tables[tableName].id, idToUpdate))
if (returning === false) {
return null
}
const findManyArgs = buildFindManyArgs({
adapter: this,
depth: 0,
fields: collection.flattenedFields,
joinQuery: false,
select,
tableName,
})
findManyArgs.where = eq(this.tables[tableName].id, idToUpdate)
const doc = await db.query[tableName].findFirst(findManyArgs)
// //////////////////////////////////
// TRANSFORM DATA
// //////////////////////////////////
const result = transform({
adapter: this,
config: this.payload.config,
data: doc,
fields: collection.flattenedFields,
joinQuery: false,
tableName,
})
return result
}

View File

@@ -1,4 +1,3 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type { TypeWithID } from 'payload'
import { eq } from 'drizzle-orm'
@@ -13,14 +12,13 @@ import { transformForWrite } from '../transform/write/index.js'
import { deleteExistingArrayRows } from './deleteExistingArrayRows.js'
import { deleteExistingRowsByPath } from './deleteExistingRowsByPath.js'
import { insertArrays } from './insertArrays.js'
import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js'
/**
* If `id` is provided, it will update the row with that ID.
* If `where` is provided, it will update the row that matches the `where`
* If neither `id` nor `where` is provided, it will create a new row.
*
* adapter function replaces the entire row and does not support partial updates.
* This function replaces the entire row and does not support partial updates.
*/
export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>({
id,
@@ -41,446 +39,428 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
upsertTarget,
where,
}: Args): Promise<T> => {
let insertedRow: Record<string, unknown> = { id }
if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
const { row } = transformForWrite({
adapter,
data,
enableAtomicWrites: true,
fields,
tableName,
})
// Split out the incoming data into the corresponding:
// base row, locales, relationships, blocks, and arrays
const rowToInsert = transformForWrite({
adapter,
data,
fields,
path,
tableName,
})
const drizzle = db as LibSQLDatabase
// First, we insert the main row
let insertedRow: Record<string, unknown>
await drizzle
.update(adapter.tables[tableName])
.set(row)
// TODO: we can skip fetching idToUpdate here with using the incoming where
.where(eq(adapter.tables[tableName].id, id))
} else {
// Split out the incoming data into the corresponding:
// base row, locales, relationships, blocks, and arrays
const rowToInsert = transformForWrite({
adapter,
data,
enableAtomicWrites: false,
fields,
path,
tableName,
})
try {
if (operation === 'update') {
const target = upsertTarget || adapter.tables[tableName].id
// First, we insert the main row
try {
if (operation === 'update') {
const target = upsertTarget || adapter.tables[tableName].id
if (id) {
rowToInsert.row.id = id
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target },
tableName,
values: rowToInsert.row,
})
} else {
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target, where },
tableName,
values: rowToInsert.row,
})
}
} else {
if (adapter.allowIDOnCreate && data.id) {
rowToInsert.row.id = data.id
}
if (id) {
rowToInsert.row.id = id
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target },
tableName,
values: rowToInsert.row,
})
} else {
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target, where },
tableName,
values: rowToInsert.row,
})
}
const localesToInsert: Record<string, unknown>[] = []
const relationsToInsert: Record<string, unknown>[] = []
const textsToInsert: Record<string, unknown>[] = []
const numbersToInsert: Record<string, unknown>[] = []
const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}
// If there are locale rows with data, add the parent and locale to each
if (Object.keys(rowToInsert.locales).length > 0) {
Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => {
localeRow._parentID = insertedRow.id
localeRow._locale = locale
localesToInsert.push(localeRow)
})
} else {
if (adapter.allowIDOnCreate && data.id) {
rowToInsert.row.id = data.id
}
;[insertedRow] = await adapter.insert({
db,
tableName,
values: rowToInsert.row,
})
}
// If there are relationships, add parent to each
if (rowToInsert.relationships.length > 0) {
rowToInsert.relationships.forEach((relation) => {
relation.parent = insertedRow.id
relationsToInsert.push(relation)
})
}
const localesToInsert: Record<string, unknown>[] = []
const relationsToInsert: Record<string, unknown>[] = []
const textsToInsert: Record<string, unknown>[] = []
const numbersToInsert: Record<string, unknown>[] = []
const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}
// If there are texts, add parent to each
if (rowToInsert.texts.length > 0) {
rowToInsert.texts.forEach((textRow) => {
textRow.parent = insertedRow.id
textsToInsert.push(textRow)
})
}
// If there are locale rows with data, add the parent and locale to each
if (Object.keys(rowToInsert.locales).length > 0) {
Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => {
localeRow._parentID = insertedRow.id
localeRow._locale = locale
localesToInsert.push(localeRow)
})
}
// If there are numbers, add parent to each
if (rowToInsert.numbers.length > 0) {
rowToInsert.numbers.forEach((numberRow) => {
numberRow.parent = insertedRow.id
numbersToInsert.push(numberRow)
})
}
// If there are relationships, add parent to each
if (rowToInsert.relationships.length > 0) {
rowToInsert.relationships.forEach((relation) => {
relation.parent = insertedRow.id
relationsToInsert.push(relation)
})
}
// If there are selects, add parent to each, and then
// store by table name and rows
if (Object.keys(rowToInsert.selects).length > 0) {
Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => {
selectsToInsert[selectTableName] = []
// If there are texts, add parent to each
if (rowToInsert.texts.length > 0) {
rowToInsert.texts.forEach((textRow) => {
textRow.parent = insertedRow.id
textsToInsert.push(textRow)
})
}
selectRows.forEach((row) => {
if (typeof row.parent === 'undefined') {
row.parent = insertedRow.id
}
// If there are numbers, add parent to each
if (rowToInsert.numbers.length > 0) {
rowToInsert.numbers.forEach((numberRow) => {
numberRow.parent = insertedRow.id
numbersToInsert.push(numberRow)
})
}
selectsToInsert[selectTableName].push(row)
})
})
}
// If there are selects, add parent to each, and then
// store by table name and rows
if (Object.keys(rowToInsert.selects).length > 0) {
Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => {
selectsToInsert[selectTableName] = []
// If there are blocks, add parent to each, and then
// store by table name and rows
Object.keys(rowToInsert.blocks).forEach((tableName) => {
rowToInsert.blocks[tableName].forEach((blockRow) => {
blockRow.row._parentID = insertedRow.id
if (!blocksToInsert[tableName]) {
blocksToInsert[tableName] = []
selectRows.forEach((row) => {
if (typeof row.parent === 'undefined') {
row.parent = insertedRow.id
}
if (blockRow.row.uuid) {
delete blockRow.row.uuid
}
blocksToInsert[tableName].push(blockRow)
selectsToInsert[selectTableName].push(row)
})
})
}
// //////////////////////////////////
// INSERT LOCALES
// //////////////////////////////////
if (localesToInsert.length > 0) {
const localeTableName = `${tableName}${adapter.localesSuffix}`
const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`]
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: localeTableName,
where: eq(localeTable._parentID, insertedRow.id),
})
// If there are blocks, add parent to each, and then
// store by table name and rows
Object.keys(rowToInsert.blocks).forEach((tableName) => {
rowToInsert.blocks[tableName].forEach((blockRow) => {
blockRow.row._parentID = insertedRow.id
if (!blocksToInsert[tableName]) {
blocksToInsert[tableName] = []
}
if (blockRow.row.uuid) {
delete blockRow.row.uuid
}
blocksToInsert[tableName].push(blockRow)
})
})
await adapter.insert({
// //////////////////////////////////
// INSERT LOCALES
// //////////////////////////////////
if (localesToInsert.length > 0) {
const localeTableName = `${tableName}${adapter.localesSuffix}`
const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`]
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: localeTableName,
values: localesToInsert,
where: eq(localeTable._parentID, insertedRow.id),
})
}
// //////////////////////////////////
// INSERT RELATIONSHIPS
// //////////////////////////////////
await adapter.insert({
db,
tableName: localeTableName,
values: localesToInsert,
})
}
const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`
// //////////////////////////////////
// INSERT RELATIONSHIPS
// //////////////////////////////////
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
tableName: relationshipsTableName,
})
}
const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`
if (relationsToInsert.length > 0) {
await adapter.insert({
db,
tableName: relationshipsTableName,
values: relationsToInsert,
})
}
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
tableName: relationshipsTableName,
})
}
// //////////////////////////////////
// INSERT hasMany TEXTS
// //////////////////////////////////
if (relationsToInsert.length > 0) {
await adapter.insert({
db,
tableName: relationshipsTableName,
values: relationsToInsert,
})
}
const textsTableName = `${tableName}_texts`
// //////////////////////////////////
// INSERT hasMany TEXTS
// //////////////////////////////////
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...textsToInsert, ...rowToInsert.textsToDelete],
tableName: textsTableName,
})
}
const textsTableName = `${tableName}_texts`
if (textsToInsert.length > 0) {
await adapter.insert({
db,
tableName: textsTableName,
values: textsToInsert,
})
}
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...textsToInsert, ...rowToInsert.textsToDelete],
tableName: textsTableName,
})
}
// //////////////////////////////////
// INSERT hasMany NUMBERS
// //////////////////////////////////
if (textsToInsert.length > 0) {
await adapter.insert({
db,
tableName: textsTableName,
values: textsToInsert,
})
}
const numbersTableName = `${tableName}_numbers`
// //////////////////////////////////
// INSERT hasMany NUMBERS
// //////////////////////////////////
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...numbersToInsert, ...rowToInsert.numbersToDelete],
tableName: numbersTableName,
})
}
const numbersTableName = `${tableName}_numbers`
if (numbersToInsert.length > 0) {
await adapter.insert({
db,
tableName: numbersTableName,
values: numbersToInsert,
})
}
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...numbersToInsert, ...rowToInsert.numbersToDelete],
tableName: numbersTableName,
})
}
// //////////////////////////////////
// INSERT BLOCKS
// //////////////////////////////////
if (numbersToInsert.length > 0) {
await adapter.insert({
db,
tableName: numbersTableName,
values: numbersToInsert,
})
}
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}
// //////////////////////////////////
// INSERT BLOCKS
// //////////////////////////////////
if (operation === 'update') {
for (const tableName of rowToInsert.blocksToDelete) {
const blockTable = adapter.tables[tableName]
await adapter.deleteWhere({
db,
tableName,
where: eq(blockTable._parentID, insertedRow.id),
})
}
}
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}
// When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
const arraysBlocksUUIDMap: Record<string, number | string> = {}
for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
insertedBlockRows[tableName] = await adapter.insert({
if (operation === 'update') {
for (const tableName of rowToInsert.blocksToDelete) {
const blockTable = adapter.tables[tableName]
await adapter.deleteWhere({
db,
tableName,
values: blockRows.map(({ row }) => row),
})
insertedBlockRows[tableName].forEach((row, i) => {
blockRows[i].row = row
if (
typeof row._uuid === 'string' &&
(typeof row.id === 'string' || typeof row.id === 'number')
) {
arraysBlocksUUIDMap[row._uuid] = row.id
}
})
const blockLocaleIndexMap: number[] = []
const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
if (Object.entries(blockRow.locales).length > 0) {
Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
if (Object.keys(blockLocaleData).length > 0) {
blockLocaleData._parentID = blockRow.row.id
blockLocaleData._locale = blockLocale
acc.push(blockLocaleData)
blockLocaleIndexMap.push(i)
}
})
}
return acc
}, [])
if (blockLocaleRowsToInsert.length > 0) {
await adapter.insert({
db,
tableName: `${tableName}${adapter.localesSuffix}`,
values: blockLocaleRowsToInsert,
})
}
await insertArrays({
adapter,
arrays: blockRows.map(({ arrays }) => arrays),
db,
parentRows: insertedBlockRows[tableName],
uuidMap: arraysBlocksUUIDMap,
where: eq(blockTable._parentID, insertedRow.id),
})
}
}
// //////////////////////////////////
// INSERT ARRAYS RECURSIVELY
// //////////////////////////////////
// When versions are enabled, this is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
const arraysBlocksUUIDMap: Record<string, number | string> = {}
if (operation === 'update') {
for (const arrayTableName of Object.keys(rowToInsert.arrays)) {
await deleteExistingArrayRows({
adapter,
db,
parentID: insertedRow.id,
tableName: arrayTableName,
for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
insertedBlockRows[tableName] = await adapter.insert({
db,
tableName,
values: blockRows.map(({ row }) => row),
})
insertedBlockRows[tableName].forEach((row, i) => {
blockRows[i].row = row
if (
typeof row._uuid === 'string' &&
(typeof row.id === 'string' || typeof row.id === 'number')
) {
arraysBlocksUUIDMap[row._uuid] = row.id
}
})
const blockLocaleIndexMap: number[] = []
const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
if (Object.entries(blockRow.locales).length > 0) {
Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
if (Object.keys(blockLocaleData).length > 0) {
blockLocaleData._parentID = blockRow.row.id
blockLocaleData._locale = blockLocale
acc.push(blockLocaleData)
blockLocaleIndexMap.push(i)
}
})
}
return acc
}, [])
if (blockLocaleRowsToInsert.length > 0) {
await adapter.insert({
db,
tableName: `${tableName}${adapter.localesSuffix}`,
values: blockLocaleRowsToInsert,
})
}
await insertArrays({
adapter,
arrays: [rowToInsert.arrays],
arrays: blockRows.map(({ arrays }) => arrays),
db,
parentRows: [insertedRow],
parentRows: insertedBlockRows[tableName],
uuidMap: arraysBlocksUUIDMap,
})
}
// //////////////////////////////////
// INSERT hasMany SELECTS
// //////////////////////////////////
// //////////////////////////////////
// INSERT ARRAYS RECURSIVELY
// //////////////////////////////////
for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) {
const selectTable = adapter.tables[selectTableName]
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: selectTableName,
where: eq(selectTable.parent, insertedRow.id),
})
}
if (operation === 'update') {
for (const arrayTableName of Object.keys(rowToInsert.arrays)) {
await deleteExistingArrayRows({
adapter,
db,
parentID: insertedRow.id,
tableName: arrayTableName,
})
}
}
if (Object.keys(arraysBlocksUUIDMap).length > 0) {
tableRows.forEach((row: any) => {
if (row.parent in arraysBlocksUUIDMap) {
row.parent = arraysBlocksUUIDMap[row.parent]
}
})
}
await insertArrays({
adapter,
arrays: [rowToInsert.arrays],
db,
parentRows: [insertedRow],
uuidMap: arraysBlocksUUIDMap,
})
if (tableRows.length) {
await adapter.insert({
db,
tableName: selectTableName,
values: tableRows,
})
}
// //////////////////////////////////
// INSERT hasMany SELECTS
// //////////////////////////////////
for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) {
const selectTable = adapter.tables[selectTableName]
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: selectTableName,
where: eq(selectTable.parent, insertedRow.id),
})
}
// //////////////////////////////////
// Error Handling
// //////////////////////////////////
} catch (caughtError) {
// Unique constraint violation error
// '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite
let error = caughtError
if (typeof caughtError === 'object' && 'cause' in caughtError) {
error = caughtError.cause
if (Object.keys(arraysBlocksUUIDMap).length > 0) {
tableRows.forEach((row: any) => {
if (row.parent in arraysBlocksUUIDMap) {
row.parent = arraysBlocksUUIDMap[row.parent]
}
})
}
if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
let fieldName: null | string = null
// We need to try and find the right constraint for the field but if we can't we fallback to a generic message
if (error.code === '23505') {
// For PostgreSQL, we can try to extract the field name from the error constraint
if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) {
fieldName = adapter.fieldConstraints[tableName]?.[error.constraint]
} else {
const replacement = `${tableName}_`
if (tableRows.length) {
await adapter.insert({
db,
tableName: selectTableName,
values: tableRows,
})
}
}
if (error.constraint.includes(replacement)) {
const replacedConstraint = error.constraint.replace(replacement, '')
// //////////////////////////////////
// Error Handling
// //////////////////////////////////
} catch (caughtError) {
// Unique constraint violation error
// '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite
if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) {
fieldName = adapter.fieldConstraints[tableName][replacedConstraint]
}
let error = caughtError
if (typeof caughtError === 'object' && 'cause' in caughtError) {
error = caughtError.cause
}
if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
let fieldName: null | string = null
// We need to try and find the right constraint for the field but if we can't we fallback to a generic message
if (error.code === '23505') {
// For PostgreSQL, we can try to extract the field name from the error constraint
if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) {
fieldName = adapter.fieldConstraints[tableName]?.[error.constraint]
} else {
const replacement = `${tableName}_`
if (error.constraint.includes(replacement)) {
const replacedConstraint = error.constraint.replace(replacement, '')
if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) {
fieldName = adapter.fieldConstraints[tableName][replacedConstraint]
}
}
}
if (!fieldName) {
// Last case scenario we extract the key and value from the detail on the error
const detail = error.detail
const regex = /Key \(([^)]+)\)=\(([^)]+)\)/
const match: string[] = detail.match(regex)
if (match && match[1]) {
const key = match[1]
fieldName = key
}
}
} else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
/**
* For SQLite, we can try to extract the field name from the error message
* The message typically looks like:
* "UNIQUE constraint failed: table_name.field_name"
*/
const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/
const match: string[] = error.message.match(regex)
if (match && match[2]) {
if (adapter.fieldConstraints[tableName]) {
fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`]
}
if (!fieldName) {
// Last case scenario we extract the key and value from the detail on the error
const detail = error.detail
const regex = /Key \(([^)]+)\)=\(([^)]+)\)/
const match: string[] = detail.match(regex)
if (match && match[1]) {
const key = match[1]
fieldName = key
}
}
} else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
/**
* For SQLite, we can try to extract the field name from the error message
* The message typically looks like:
* "UNIQUE constraint failed: table_name.field_name"
*/
const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/
const match: string[] = error.message.match(regex)
if (match && match[2]) {
if (adapter.fieldConstraints[tableName]) {
fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`]
}
if (!fieldName) {
fieldName = match[2]
}
fieldName = match[2]
}
}
throw new ValidationError(
{
id,
errors: [
{
message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
path: fieldName,
},
],
req,
},
req?.t,
)
} else {
throw error
}
throw new ValidationError(
{
id,
errors: [
{
message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
path: fieldName,
},
],
req,
},
req?.t,
)
} else {
throw error
}
}

View File

@@ -1,52 +0,0 @@
import type { FlattenedField } from 'payload'
/**
* Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call.
* We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships.
*/
export const shouldUseOptimizedUpsertRow = ({
data,
fields,
}: {
data: Record<string, unknown>
fields: FlattenedField[]
}) => {
for (const key in data) {
const value = data[key]
const field = fields.find((each) => each.name === key)
if (!field) {
continue
}
if (
field.type === 'array' ||
field.type === 'blocks' ||
((field.type === 'text' ||
field.type === 'relationship' ||
field.type === 'upload' ||
field.type === 'select' ||
field.type === 'number') &&
field.hasMany) ||
((field.type === 'relationship' || field.type === 'upload') &&
Array.isArray(field.relationTo)) ||
field.localized
) {
return false
}
if (
(field.type === 'group' || field.type === 'tab') &&
value &&
typeof value === 'object' &&
!shouldUseOptimizedUpsertRow({
data: value as Record<string, unknown>,
fields: field.flattenedFields,
})
) {
return false
}
}
return true
}

View File

@@ -1,7 +1,5 @@
const RawConstraintSymbol = Symbol('RawConstraint')
export const DistinctSymbol = Symbol('DistinctSymbol')
/**
* You can use this to inject a raw query to where
*/

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-nodemailer",
"version": "3.48.0",
"version": "3.47.0",
"description": "Payload Nodemailer Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-resend",
"version": "3.48.0",
"version": "3.47.0",
"description": "Payload Resend Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/graphql",
"version": "3.48.0",
"version": "3.47.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview-react",
"version": "3.48.0",
"version": "3.47.0",
"description": "The official React SDK for Payload Live Preview",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview-vue",
"version": "3.48.0",
"version": "3.47.0",
"description": "The official Vue SDK for Payload Live Preview",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview",
"version": "3.48.0",
"version": "3.47.0",
"description": "The official live preview JavaScript SDK for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/next",
"version": "3.48.0",
"version": "3.47.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -27,7 +27,7 @@ export async function login({ collection, config, email, password, username }: L
token?: string
user: any
}> {
const payload = await getPayload({ config, cron: true })
const payload = await getPayload({ config })
const authConfig = payload.collections[collection]?.config.auth

View File

@@ -14,7 +14,7 @@ export async function logout({
allSessions?: boolean
config: Promise<SanitizedConfig> | SanitizedConfig
}) {
const payload = await getPayload({ config, cron: true })
const payload = await getPayload({ config })
const headers = await nextHeaders()
const authResult = await payload.auth({ headers })

View File

@@ -9,7 +9,7 @@ import { getExistingAuthToken } from '../utilities/getExistingAuthToken.js'
import { setPayloadAuthCookie } from '../utilities/setPayloadAuthCookie.js'
export async function refresh({ config }: { config: any }) {
const payload = await getPayload({ config, cron: true })
const payload = await getPayload({ config })
const headers = await nextHeaders()
const result = await payload.auth({ headers })

View File

@@ -66,7 +66,7 @@ export const initReq = async function ({
const partialResult = await partialReqCache.get(async () => {
const config = await configPromise
const payload = await getPayload({ config, cron: true, importMap })
const payload = await getPayload({ config, importMap })
const languageCode = getRequestLanguage({
config,
cookies,

View File

@@ -58,45 +58,20 @@ export const buildBrowseByFolderView = async (
throw new Error('not-found')
}
const foldersSlug = config.folders.slug
/**
* All visiible folder enabled collection slugs that the user has read permissions for.
*/
const allowReadCollectionSlugs = browseByFolderSlugsFromArgs.filter(
const browseByFolderSlugs = browseByFolderSlugsFromArgs.filter(
(collectionSlug) =>
permissions?.collections?.[collectionSlug]?.read &&
visibleEntities.collections.includes(collectionSlug),
)
const query =
queryFromArgs ||
((queryFromReq
? {
...queryFromReq,
relationTo:
typeof queryFromReq?.relationTo === 'string'
? JSON.parse(queryFromReq.relationTo)
: undefined,
}
: {}) as ListQuery)
/**
* If a folderID is provided and the relationTo query param exists,
* we filter the collection slugs to only those that are allowed to be read.
*
* If no folderID is provided, only folders should be active and displayed (the root view).
*/
let collectionsToDisplay: string[] = []
if (folderID && Array.isArray(query?.relationTo)) {
collectionsToDisplay = query.relationTo.filter(
(slug) => allowReadCollectionSlugs.includes(slug) || slug === foldersSlug,
)
} else if (folderID) {
collectionsToDisplay = [...allowReadCollectionSlugs, foldersSlug]
} else {
collectionsToDisplay = [foldersSlug]
}
const query = queryFromArgs || queryFromReq
const activeCollectionFolderSlugs: string[] =
Array.isArray(query?.relationTo) && query.relationTo.length
? query.relationTo.filter(
(slug) =>
browseByFolderSlugs.includes(slug) || (config.folders && slug === config.folders.slug),
)
: [...browseByFolderSlugs, config.folders.slug]
const {
routes: { admin: adminRoute },
@@ -118,15 +93,14 @@ export const buildBrowseByFolderView = async (
},
})
const sortPreference: FolderSortKeys = browseByFolderPreferences?.sort || 'name'
const sortPreference: FolderSortKeys = browseByFolderPreferences?.sort || '_folderOrDocumentTitle'
const viewPreference = browseByFolderPreferences?.viewPreference || 'grid'
const { breadcrumbs, documents, folderAssignedCollections, FolderResultsComponent, subfolders } =
const { breadcrumbs, documents, FolderResultsComponent, subfolders } =
await getFolderResultsComponentAndData({
browseByFolder: true,
collectionsToDisplay,
activeCollectionSlugs: activeCollectionFolderSlugs,
browseByFolder: false,
displayAs: viewPreference,
folderAssignedCollections: collectionsToDisplay.filter((slug) => slug !== foldersSlug) || [],
folderID,
req: initPageResult.req,
sort: sortPreference,
@@ -168,33 +142,10 @@ export const buildBrowseByFolderView = async (
// serverProps,
// })
// Filter down allCollectionFolderSlugs by the ones the current folder is assingned to
const allAvailableCollectionSlugs =
folderID && Array.isArray(folderAssignedCollections) && folderAssignedCollections.length
? allowReadCollectionSlugs.filter((slug) => folderAssignedCollections.includes(slug))
: allowReadCollectionSlugs
// Filter down activeCollectionFolderSlugs by the ones the current folder is assingned to
const availableActiveCollectionFolderSlugs = collectionsToDisplay.filter((slug) => {
if (slug === foldersSlug) {
return permissions?.collections?.[foldersSlug]?.read
} else {
return !folderAssignedCollections || folderAssignedCollections.includes(slug)
}
})
// Documents cannot be created without a parent folder in this view
const allowCreateCollectionSlugs = (
resolvedFolderID ? [foldersSlug, ...allAvailableCollectionSlugs] : [foldersSlug]
).filter((collectionSlug) => {
if (collectionSlug === foldersSlug) {
return permissions?.collections?.[foldersSlug]?.create
}
return (
permissions?.collections?.[collectionSlug]?.create &&
visibleEntities.collections.includes(collectionSlug)
)
})
// documents cannot be created without a parent folder in this view
const allowCreateCollectionSlugs = resolvedFolderID
? [config.folders.slug, ...browseByFolderSlugs]
: [config.folders.slug]
return {
View: (
@@ -203,8 +154,8 @@ export const buildBrowseByFolderView = async (
{RenderServerComponent({
clientProps: {
// ...folderViewSlots,
activeCollectionFolderSlugs: availableActiveCollectionFolderSlugs,
allCollectionFolderSlugs: allAvailableCollectionSlugs,
activeCollectionFolderSlugs,
allCollectionFolderSlugs: browseByFolderSlugs,
allowCreateCollectionSlugs,
baseFolderPath: `/browse-by-folder`,
breadcrumbs,
@@ -212,7 +163,6 @@ export const buildBrowseByFolderView = async (
disableBulkEdit,
documents,
enableRowSelections,
folderAssignedCollections,
folderFieldName: config.folders.fieldName,
folderID: resolvedFolderID || null,
FolderResultsComponent,

View File

@@ -97,28 +97,23 @@ export const buildCollectionFolderView = async (
},
})
const sortPreference: FolderSortKeys = collectionFolderPreferences?.sort || 'name'
const sortPreference: FolderSortKeys =
collectionFolderPreferences?.sort || '_folderOrDocumentTitle'
const viewPreference = collectionFolderPreferences?.viewPreference || 'grid'
const {
routes: { admin: adminRoute },
} = config
const {
breadcrumbs,
documents,
folderAssignedCollections,
FolderResultsComponent,
subfolders,
} = await getFolderResultsComponentAndData({
browseByFolder: false,
collectionsToDisplay: [config.folders.slug, collectionSlug],
displayAs: viewPreference,
folderAssignedCollections: [collectionSlug],
folderID,
req: initPageResult.req,
sort: sortPreference,
})
const { breadcrumbs, documents, FolderResultsComponent, subfolders } =
await getFolderResultsComponentAndData({
activeCollectionSlugs: [config.folders.slug, collectionSlug],
browseByFolder: false,
displayAs: viewPreference,
folderID,
req: initPageResult.req,
sort: sortPreference,
})
const resolvedFolderID = breadcrumbs[breadcrumbs.length - 1]?.id
@@ -187,7 +182,6 @@ export const buildCollectionFolderView = async (
disableBulkEdit,
documents,
enableRowSelections,
folderAssignedCollections,
folderFieldName: config.folders.fieldName,
folderID: resolvedFolderID || null,
FolderResultsComponent,

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/payload-cloud",
"version": "3.48.0",
"version": "3.47.0",
"description": "The official Payload Cloud plugin",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "payload",
"version": "3.48.0",
"version": "3.47.0",
"description": "Node, React, Headless CMS and Application Framework built on Next.js",
"keywords": [
"admin panel",

View File

@@ -1,7 +1,7 @@
import type { ImportMap } from '../../bin/generateImportMap/index.js'
import type { SanitizedConfig } from '../../config/types.js'
import type { PaginatedDocs } from '../../database/types.js'
import type { CollectionSlug, ColumnPreference, FolderSortKeys } from '../../index.js'
import type { CollectionSlug, ColumnPreference } from '../../index.js'
import type { PayloadRequest, Sort, Where } from '../../types/index.js'
import type { ColumnsFromURL } from '../../utilities/transformColumnPreferences.js'
@@ -78,36 +78,10 @@ export type BuildCollectionFolderViewResult = {
}
export type GetFolderResultsComponentAndDataArgs = {
/**
* If true and no folderID is provided, only folders will be returned.
* If false, the results will include documents from the active collections.
*/
activeCollectionSlugs: CollectionSlug[]
browseByFolder: boolean
/**
* Used to filter document types to include in the results/display.
*
* i.e. ['folders', 'posts'] will only include folders and posts in the results.
*
* collectionsToQuery?
*/
collectionsToDisplay: CollectionSlug[]
/**
* Used to determine how the results should be displayed.
*/
displayAs: 'grid' | 'list'
/**
* Used to filter folders by the collections they are assigned to.
*
* i.e. ['posts'] will only include folders that are assigned to the posts collections.
*/
folderAssignedCollections: CollectionSlug[]
/**
* The ID of the folder to filter results by.
*/
folderID: number | string | undefined
req: PayloadRequest
/**
* The sort order for the results.
*/
sort: FolderSortKeys
sort: string
}

View File

@@ -30,7 +30,6 @@ export type FolderListViewClientProps = {
disableBulkEdit?: boolean
documents: FolderOrDocument[]
enableRowSelections?: boolean
folderAssignedCollections?: SanitizedCollectionConfig['slug'][]
folderFieldName: string
folderID: null | number | string
FolderResultsComponent: React.ReactNode

View File

@@ -107,7 +107,7 @@ export const bin = async () => {
}
if (script === 'jobs:run') {
const payload = await getPayload({ config }) // Do not setup crons here - this bin script can set up its own crons
const payload = await getPayload({ config })
const limit = args.limit ? parseInt(args.limit, 10) : undefined
const queue = args.queue ? args.queue : undefined
const allQueues = !!args.allQueues

View File

@@ -82,7 +82,6 @@ export type HookOperationType =
| 'forgotPassword'
| 'login'
| 'read'
| 'readDistinct'
| 'refresh'
| 'resetPassword'
| 'update'

View File

@@ -1,46 +0,0 @@
import { status as httpStatus } from 'http-status'
import type { PayloadHandler } from '../../config/types.js'
import type { Where } from '../../types/index.js'
import { APIError } from '../../errors/APIError.js'
import { getRequestCollection } from '../../utilities/getRequestEntity.js'
import { headersWithCors } from '../../utilities/headersWithCors.js'
import { isNumber } from '../../utilities/isNumber.js'
import { findDistinctOperation } from '../operations/findDistinct.js'
export const findDistinctHandler: PayloadHandler = async (req) => {
const collection = getRequestCollection(req)
const { depth, field, limit, page, sort, where } = req.query as {
depth?: string
field?: string
limit?: string
page?: string
sort?: string
sortOrder?: string
where?: Where
}
if (!field) {
throw new APIError('field must be specified', httpStatus.BAD_REQUEST)
}
const result = await findDistinctOperation({
collection,
depth: isNumber(depth) ? Number(depth) : undefined,
field,
limit: isNumber(limit) ? Number(limit) : undefined,
page: isNumber(page) ? Number(page) : undefined,
req,
sort: typeof sort === 'string' ? sort.split(',') : undefined,
where,
})
return Response.json(result, {
headers: headersWithCors({
headers: new Headers(),
req,
}),
status: httpStatus.OK,
})
}

View File

@@ -9,7 +9,6 @@ import { docAccessHandler } from './docAccess.js'
import { duplicateHandler } from './duplicate.js'
import { findHandler } from './find.js'
import { findByIDHandler } from './findByID.js'
import { findDistinctHandler } from './findDistinct.js'
import { findVersionByIDHandler } from './findVersionByID.js'
import { findVersionsHandler } from './findVersions.js'
import { previewHandler } from './preview.js'
@@ -49,12 +48,6 @@ export const defaultCollectionEndpoints: Endpoint[] = [
method: 'get',
path: '/versions',
},
// Might be uncommented in the future
// {
// handler: findDistinctHandler,
// method: 'get',
// path: '/distinct',
// },
{
handler: duplicateHandler,
method: 'post',

View File

@@ -1,189 +0,0 @@
import httpStatus from 'http-status'
import type { AccessResult } from '../../config/types.js'
import type { PaginatedDistinctDocs } from '../../database/types.js'
import type { PayloadRequest, PopulateType, Sort, Where } from '../../types/index.js'
import type { Collection } from '../config/types.js'
import { executeAccess } from '../../auth/executeAccess.js'
import { combineQueries } from '../../database/combineQueries.js'
import { validateQueryPaths } from '../../database/queryValidation/validateQueryPaths.js'
import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js'
import { APIError } from '../../errors/APIError.js'
import { Forbidden } from '../../errors/Forbidden.js'
import { relationshipPopulationPromise } from '../../fields/hooks/afterRead/relationshipPopulationPromise.js'
import { getFieldByPath } from '../../utilities/getFieldByPath.js'
import { killTransaction } from '../../utilities/killTransaction.js'
import { buildAfterOperation } from './utils.js'
/** Arguments accepted by `findDistinctOperation`. */
export type Arguments = {
  /** The collection to query distinct values from. */
  collection: Collection
  /** Population depth for relationship/upload values in the result. */
  depth?: number
  /** When `true`, access-control denial returns empty results instead of throwing. */
  disableErrors?: boolean
  /** Dot-notation path of the field to collect distinct values for. */
  field: string
  /** Maximum number of distinct values per page. */
  limit?: number
  /** Locale to query against. */
  locale?: string
  /** Skip access control when `true`. */
  overrideAccess?: boolean
  /** Page number to return (used together with `limit`). */
  page?: number
  /** Controls which fields are included on populated documents. */
  populate?: PopulateType
  /** The request, threading transaction / user / locale into the operation. */
  req?: PayloadRequest
  /** Opt in to including hidden fields; hidden fields otherwise raise Forbidden. */
  showHiddenFields?: boolean
  /** Sort order for the distinct values. */
  sort?: Sort
  /** Filter query applied before collecting distinct values. */
  where?: Where
}
/**
 * Collects the distinct values of a single field across a collection.
 *
 * Flow: run `beforeOperation` hooks → enforce read access (unless
 * `overrideAccess`) → validate the `where` query and the target field →
 * delegate to the database adapter's `findDistinct` → optionally populate
 * relationship/upload values → run `afterOperation` hooks.
 *
 * Any thrown error first kills the request's transaction, then propagates.
 *
 * @throws APIError when `field` does not exist on the collection.
 * @throws Forbidden when the field is hidden (and hidden fields were not
 *   requested) or field-level read access denies the request.
 */
export const findDistinctOperation = async (
  incomingArgs: Arguments,
): Promise<PaginatedDistinctDocs<Record<string, unknown>>> => {
  let args = incomingArgs
  try {
    // /////////////////////////////////////
    // beforeOperation - Collection
    // /////////////////////////////////////
    if (args.collection.config.hooks?.beforeOperation?.length) {
      for (const hook of args.collection.config.hooks.beforeOperation) {
        // A hook may replace the args wholesale; fall back to the previous
        // args when the hook returns a falsy value.
        args =
          (await hook({
            args,
            collection: args.collection.config,
            context: args.req!.context,
            operation: 'readDistinct',
            req: args.req!,
          })) || args
      }
    }
    const {
      collection: { config: collectionConfig },
      disableErrors,
      overrideAccess,
      populate,
      showHiddenFields = false,
      where,
    } = args
    const req = args.req!
    const { locale, payload } = req
    // /////////////////////////////////////
    // Access
    // /////////////////////////////////////
    let accessResult: AccessResult
    if (!overrideAccess) {
      accessResult = await executeAccess({ disableErrors, req }, collectionConfig.access.read)
      // If errors are disabled, and access returns false, return empty results
      if (accessResult === false) {
        return {
          hasNextPage: false,
          hasPrevPage: false,
          limit: args.limit || 0,
          nextPage: null,
          page: 1,
          pagingCounter: 1,
          prevPage: null,
          totalDocs: 0,
          totalPages: 0,
          values: [],
        }
      }
    }
    // /////////////////////////////////////
    // Find Distinct
    // /////////////////////////////////////
    // Merge the caller's `where` with any access-control constraint.
    const fullWhere = combineQueries(where!, accessResult!)
    sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere })
    await validateQueryPaths({
      collectionConfig,
      overrideAccess: overrideAccess!,
      req,
      where: where ?? {},
    })
    // Resolve the target field from its dot-notation path.
    const fieldResult = getFieldByPath({
      fields: collectionConfig.flattenedFields,
      path: args.field,
    })
    if (!fieldResult) {
      throw new APIError(
        `Field ${args.field} was not found in the collection ${collectionConfig.slug}`,
        httpStatus.BAD_REQUEST,
      )
    }
    if (fieldResult.field.hidden && !showHiddenFields) {
      throw new Forbidden(req.t)
    }
    // Field-level read access is enforced in addition to collection access.
    if (fieldResult.field.access?.read) {
      const hasAccess = await fieldResult.field.access.read({ req })
      if (!hasAccess) {
        throw new Forbidden(req.t)
      }
    }
    let result = await payload.db.findDistinct({
      collection: collectionConfig.slug,
      field: args.field,
      limit: args.limit,
      locale: locale!,
      page: args.page,
      req,
      sort: args.sort,
      where: fullWhere,
    })
    // Relationship/upload values are returned as IDs by the adapter; populate
    // them only when the caller asked for depth > 0.
    if (
      (fieldResult.field.type === 'relationship' || fieldResult.field.type === 'upload') &&
      args.depth
    ) {
      const populationPromises: Promise<void>[] = []
      for (const doc of result.values) {
        populationPromises.push(
          relationshipPopulationPromise({
            currentDepth: 0,
            depth: args.depth,
            draft: false,
            fallbackLocale: req.fallbackLocale || null,
            field: fieldResult.field,
            locale: req.locale || null,
            overrideAccess: args.overrideAccess ?? true,
            parentIsLocalized: false,
            populate,
            req,
            showHiddenFields: false,
            siblingDoc: doc,
          }),
        )
      }
      await Promise.all(populationPromises)
    }
    // /////////////////////////////////////
    // afterOperation - Collection
    // /////////////////////////////////////
    result = await buildAfterOperation({
      args,
      collection: collectionConfig,
      operation: 'findDistinct',
      result,
    })
    // /////////////////////////////////////
    // Return results
    // /////////////////////////////////////
    return result
  } catch (error: unknown) {
    // Roll back / release the request transaction before rethrowing.
    await killTransaction(args.req!)
    throw error
  }
}

View File

@@ -1,138 +0,0 @@
import type {
CollectionSlug,
DataFromCollectionSlug,
Document,
PaginatedDistinctDocs,
Payload,
PayloadRequest,
PopulateType,
RequestContext,
Sort,
TypedLocale,
Where,
} from '../../../index.js'
import type { CreateLocalReqOptions } from '../../../utilities/createLocalReq.js'
import { APIError, createLocalReq } from '../../../index.js'
import { findDistinctOperation } from '../findDistinct.js'
/** Options for the Local API `findDistinct` operation. */
export type Options<
  TSlug extends CollectionSlug,
  TField extends keyof DataFromCollectionSlug<TSlug>,
> = {
  /**
   * The Collection slug to operate against.
   */
  collection: TSlug
  /**
   * [Context](https://payloadcms.com/docs/hooks/context), which will then be passed to `context` and `req.context`,
   * which can be read by hooks. Useful if you want to pass additional information to the hooks which
   * shouldn't be necessarily part of the document, for example a `triggerBeforeChange` option which can be read by the BeforeChange hook
   * to determine if it should run or not.
   */
  context?: RequestContext
  /**
   * [Control auto-population](https://payloadcms.com/docs/queries/depth) of nested relationship and upload fields.
   */
  depth?: number
  /**
   * When set to `true`, errors will not be thrown.
   */
  disableErrors?: boolean
  /**
   * The field to get distinct values for.
   */
  field: TField
  /**
   * The maximum distinct field values to be returned.
   * By default the operation returns all the values.
   */
  limit?: number
  /**
   * Specify [locale](https://payloadcms.com/docs/configuration/localization) for any returned documents.
   */
  locale?: 'all' | TypedLocale
  /**
   * Skip access control.
   * Set to `false` if you want to respect Access Control for the operation, for example when fetching data for the front-end.
   * @default true
   */
  overrideAccess?: boolean
  /**
   * Get a specific page number (if limit is specified)
   * @default 1
   */
  page?: number
  /**
   * Specify [populate](https://payloadcms.com/docs/queries/select#populate) to control which fields to include to the result from populated documents.
   */
  populate?: PopulateType
  /**
   * The `PayloadRequest` object. You can pass it to thread the current [transaction](https://payloadcms.com/docs/database/transactions), user and locale to the operation.
   * Recommended to pass when using the Local API from hooks, as usually you want to execute the operation within the current transaction.
   */
  req?: Partial<PayloadRequest>
  /**
   * Opt-in to receiving hidden fields. By default, they are hidden from returned documents in accordance to your config.
   * @default false
   */
  showHiddenFields?: boolean
  /**
   * Sort the documents, can be a string or an array of strings
   * @example '-createdAt' // Sort DESC by createdAt
   * @example ['group', '-createdAt'] // sort by 2 fields, ASC group and DESC createdAt
   */
  sort?: Sort
  /**
   * If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks.
   */
  user?: Document
  /**
   * A filter [query](https://payloadcms.com/docs/queries/overview)
   */
  where?: Where
}
/**
 * Local API: retrieve the distinct values of a field in a collection.
 *
 * Thin wrapper around `findDistinctOperation` that resolves the collection
 * from its slug and builds a local `PayloadRequest` from the options.
 *
 * @throws APIError when no collection with the given slug exists.
 */
export async function findDistinct<
  TSlug extends CollectionSlug,
  TField extends keyof DataFromCollectionSlug<TSlug> & string,
>(
  payload: Payload,
  options: Options<TSlug, TField>,
): Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>> {
  const collectionSlug = options.collection
  const targetCollection = payload.collections[collectionSlug]

  if (!targetCollection) {
    throw new APIError(
      `The collection with slug ${String(collectionSlug)} can't be found. Find Operation.`,
    )
  }

  // Defaults mirror the operation's documented behavior: no population depth,
  // access control skipped unless explicitly requested otherwise.
  return findDistinctOperation({
    collection: targetCollection,
    depth: options.depth !== undefined ? options.depth : 0,
    disableErrors: options.disableErrors,
    field: options.field,
    limit: options.limit,
    overrideAccess: options.overrideAccess !== undefined ? options.overrideAccess : true,
    page: options.page,
    populate: options.populate,
    req: await createLocalReq(options as CreateLocalReqOptions, payload),
    showHiddenFields: options.showHiddenFields,
    sort: options.sort,
    where: options.where,
  }) as Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>>
}

View File

@@ -12,7 +12,6 @@ import type { deleteOperation } from './delete.js'
import type { deleteByIDOperation } from './deleteByID.js'
import type { findOperation } from './find.js'
import type { findByIDOperation } from './findByID.js'
import type { findDistinctOperation } from './findDistinct.js'
import type { updateOperation } from './update.js'
import type { updateByIDOperation } from './updateByID.js'
@@ -31,7 +30,6 @@ export type AfterOperationMap<TOperationGeneric extends CollectionSlug> = {
boolean,
SelectFromCollectionSlug<TOperationGeneric>
>
findDistinct: typeof findDistinctOperation
forgotPassword: typeof forgotPasswordOperation
login: typeof loginOperation<TOperationGeneric>
refresh: typeof refreshOperation
@@ -83,11 +81,6 @@ export type AfterOperationArg<TOperationGeneric extends CollectionSlug> = {
operation: 'findByID'
result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['findByID']>>
}
| {
args: Parameters<AfterOperationMap<TOperationGeneric>['findDistinct']>[0]
operation: 'findDistinct'
result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['findDistinct']>>
}
| {
args: Parameters<AfterOperationMap<TOperationGeneric>['forgotPassword']>[0]
operation: 'forgotPassword'

View File

@@ -163,17 +163,14 @@ export const addDefaultsToConfig = (config: Config): Config => {
...(config.auth || {}),
}
if (
config.folders !== false &&
config.collections.some((collection) => Boolean(collection.folders))
) {
const hasFolderCollections = config.collections.some((collection) => Boolean(collection.folders))
if (hasFolderCollections) {
config.folders = {
slug: config.folders?.slug ?? foldersSlug,
browseByFolder: config.folders?.browseByFolder ?? true,
collectionOverrides: config.folders?.collectionOverrides || undefined,
collectionSpecific: config.folders?.collectionSpecific ?? true,
debug: config.folders?.debug ?? false,
fieldName: config.folders?.fieldName ?? parentFolderFieldName,
slug: foldersSlug,
browseByFolder: true,
debug: false,
fieldName: parentFolderFieldName,
...(config.folders || {}),
}
} else {
config.folders = false

View File

@@ -3,7 +3,6 @@ import type { AcceptedLanguages } from '@payloadcms/translations'
import { en } from '@payloadcms/translations/languages/en'
import { deepMergeSimple } from '@payloadcms/translations/utilities'
import type { CollectionSlug, GlobalSlug, SanitizedCollectionConfig } from '../index.js'
import type { SanitizedJobsConfig } from '../queues/config/types/index.js'
import type {
Config,
@@ -19,10 +18,15 @@ import { sanitizeCollection } from '../collections/config/sanitize.js'
import { migrationsCollection } from '../database/migrations/migrationsCollection.js'
import { DuplicateCollection, InvalidConfiguration } from '../errors/index.js'
import { defaultTimezones } from '../fields/baseFields/timezone/defaultTimezones.js'
import { addFolderCollection } from '../folders/addFolderCollection.js'
import { addFolderFieldToCollection } from '../folders/addFolderFieldToCollection.js'
import { addFolderCollections } from '../folders/addFolderCollections.js'
import { sanitizeGlobal } from '../globals/config/sanitize.js'
import { baseBlockFields, formatLabels, sanitizeFields } from '../index.js'
import {
baseBlockFields,
type CollectionSlug,
formatLabels,
type GlobalSlug,
sanitizeFields,
} from '../index.js'
import {
getLockedDocumentsCollection,
lockedDocumentsCollectionSlug,
@@ -187,6 +191,8 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise<SanitizedC
const collectionSlugs = new Set<CollectionSlug>()
await addFolderCollections(config as unknown as Config)
const validRelationships = [
...(config.collections?.map((c) => c.slug) ?? []),
jobsCollectionSlug,
@@ -194,10 +200,6 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise<SanitizedC
preferencesCollectionSlug,
]
if (config.folders !== false) {
validRelationships.push(config.folders!.slug)
}
/**
* Blocks sanitization needs to happen before collections, as collection/global join field sanitization needs config.blocks
* to be populated with the sanitized blocks
@@ -234,8 +236,6 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise<SanitizedC
}
}
const folderEnabledCollections: SanitizedCollectionConfig[] = []
for (let i = 0; i < config.collections!.length; i++) {
if (collectionSlugs.has(config.collections![i]!.slug)) {
throw new DuplicateCollection('slug', config.collections![i]!.slug)
@@ -257,25 +257,12 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise<SanitizedC
}
}
if (config.folders !== false && config.collections![i]!.folders) {
addFolderFieldToCollection({
collection: config.collections![i]!,
collectionSpecific: config.folders!.collectionSpecific,
folderFieldName: config.folders!.fieldName,
folderSlug: config.folders!.slug,
})
}
config.collections![i] = await sanitizeCollection(
config as unknown as Config,
config.collections![i]!,
richTextSanitizationPromises,
validRelationships,
)
if (config.folders !== false && config.collections![i]!.folders) {
folderEnabledCollections.push(config.collections![i]!)
}
}
if (config.globals!.length > 0) {
@@ -345,16 +332,6 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise<SanitizedC
configWithDefaults.collections!.push(sanitizedJobsCollection)
}
if (config.folders !== false && folderEnabledCollections.length) {
await addFolderCollection({
collectionSpecific: config.folders!.collectionSpecific,
config: config as unknown as Config,
folderEnabledCollections,
richTextSanitizationPromises,
validRelationships,
})
}
configWithDefaults.collections!.push(
await sanitizeCollection(
config as unknown as Config,

View File

@@ -257,13 +257,6 @@ export type InitOptions = {
* and the backend functionality
*/
config: Promise<SanitizedConfig> | SanitizedConfig
/**
* If set to `true`, payload will initialize crons for things like autorunning jobs on initialization.
*
* @default false
*/
cron?: boolean
/**
* Disable connect to the database on init
*/
@@ -275,6 +268,7 @@ export type InitOptions = {
disableOnInit?: boolean
importMap?: ImportMap
/**
* A function that is called immediately following startup that receives the Payload instance as it's only argument.
*/

View File

@@ -63,8 +63,6 @@ export interface BaseDatabaseAdapter {
find: Find
findDistinct: FindDistinct
findGlobal: FindGlobal
findGlobalVersions: FindGlobalVersions
@@ -84,15 +82,16 @@ export interface BaseDatabaseAdapter {
* Run any migration up functions that have not yet been performed and update the status
*/
migrate: (args?: { migrations?: Migration[] }) => Promise<void>
/**
* Run any migration down functions that have been performed
*/
migrateDown: () => Promise<void>
/**
* Drop the current database and run all migrate up functions
*/
migrateFresh: (args: { forceAcceptWarning?: boolean }) => Promise<void>
/**
* Run all migration down functions before running up
*/
@@ -105,7 +104,6 @@ export interface BaseDatabaseAdapter {
* Read the current state of migrations and output the result to show which have been run
*/
migrateStatus: () => Promise<void>
/**
* Path to read and write migration files from
*/
@@ -115,6 +113,7 @@ export interface BaseDatabaseAdapter {
* The name of the database adapter
*/
name: string
/**
* Full package name of the database adapter
*
@@ -125,7 +124,6 @@ export interface BaseDatabaseAdapter {
* reference to the instance of payload
*/
payload: Payload
queryDrafts: QueryDrafts
/**
@@ -153,6 +151,7 @@ export interface BaseDatabaseAdapter {
updateMany: UpdateMany
updateOne: UpdateOne
updateVersion: UpdateVersion
upsert: Upsert
}
@@ -482,34 +481,6 @@ export type CreateArgs = {
select?: SelectType
}
/** Arguments passed to a database adapter's `findDistinct` implementation. */
export type FindDistinctArgs = {
  /** Slug of the collection to query. */
  collection: CollectionSlug
  /** Dot-notation path of the field to collect distinct values for. */
  field: string
  /** Maximum number of distinct values per page. */
  limit?: number
  /** Locale to query against. */
  locale?: string
  /** Page number to return (used together with `limit`). */
  page?: number
  /** Request carrying the current transaction, if any. */
  req?: Partial<PayloadRequest>
  /** Sort order for the distinct values. */
  sort?: Sort
  /** Filter applied before collecting distinct values. */
  where?: Where
}
/** Paginated result shape for distinct-value queries (mirrors paginated docs metadata). */
export type PaginatedDistinctDocs<T extends Record<string, unknown>> = {
  hasNextPage: boolean
  hasPrevPage: boolean
  limit: number
  nextPage?: null | number | undefined
  page: number
  pagingCounter: number
  prevPage?: null | number | undefined
  totalDocs: number
  totalPages: number
  /** The distinct values for the requested field, one record per value. */
  values: T[]
}
/** Database adapter method: return the distinct values of a field. */
export type FindDistinct = (
  args: FindDistinctArgs,
) => Promise<PaginatedDistinctDocs<Record<string, any>>>
export type Create = (args: CreateArgs) => Promise<Document>
export type UpdateOneArgs = {

View File

@@ -1,51 +0,0 @@
import type { Config, SanitizedConfig } from '../config/types.js'
import type { CollectionConfig } from '../index.js'
import { sanitizeCollection } from '../collections/config/sanitize.js'
import { createFolderCollection } from './createFolderCollection.js'
export async function addFolderCollection({
collectionSpecific,
config,
folderEnabledCollections,
richTextSanitizationPromises = [],
validRelationships = [],
}: {
collectionSpecific: boolean
config: NonNullable<Config>
folderEnabledCollections: CollectionConfig[]
richTextSanitizationPromises?: Array<(config: SanitizedConfig) => Promise<void>>
validRelationships?: string[]
}): Promise<void> {
if (config.folders === false) {
return
}
let folderCollectionConfig = createFolderCollection({
slug: config.folders!.slug as string,
collectionSpecific,
debug: config.folders!.debug,
folderEnabledCollections,
folderFieldName: config.folders!.fieldName as string,
})
const collectionIndex = config.collections!.push(folderCollectionConfig)
if (
Array.isArray(config.folders?.collectionOverrides) &&
config?.folders.collectionOverrides.length
) {
for (const override of config.folders.collectionOverrides) {
folderCollectionConfig = await override({ collection: folderCollectionConfig })
}
}
const sanitizedCollectionWithOverrides = await sanitizeCollection(
config as unknown as Config,
folderCollectionConfig,
richTextSanitizationPromises,
validRelationships,
)
config.collections![collectionIndex - 1] = sanitizedCollectionWithOverrides
}

View File

@@ -0,0 +1,56 @@
import type { Config } from '../config/types.js'
import type { CollectionSlug } from '../index.js'
import { createFolderCollection } from './createFolderCollection.js'
/**
 * Wires folder support into the raw config: injects the folder relationship
 * field into every collection with `folders` enabled, then registers the
 * folders collection itself (after applying any `collectionOverrides`).
 *
 * No-op when folders are disabled or the config has no collections.
 */
export async function addFolderCollections(config: NonNullable<Config>): Promise<void> {
  if (!config.collections || !config.folders) {
    return
  }

  const debug = Boolean(config?.folders?.debug)
  const folderFieldName = config?.folders?.fieldName as unknown as string
  const folderSlug = config?.folders?.slug as unknown as CollectionSlug
  const foldersEnabledSlugs: CollectionSlug[] = []

  // Inject the folder relationship field into each folder-enabled collection.
  for (const collection of config.collections) {
    if (!collection?.folders) {
      continue
    }
    collection.fields.push({
      name: folderFieldName,
      type: 'relationship',
      admin: {
        allowCreate: false,
        allowEdit: false,
        components: {
          Cell: '@payloadcms/ui/rsc#FolderTableCell',
          Field: '@payloadcms/ui/rsc#FolderEditField',
        },
      },
      index: true,
      label: 'Folder',
      relationTo: folderSlug,
    })
    foldersEnabledSlugs.push(collection.slug)
  }

  // Only register the folders collection when at least one collection opted in.
  if (!foldersEnabledSlugs.length) {
    return
  }

  let folderCollection = createFolderCollection({
    slug: folderSlug,
    collectionSlugs: foldersEnabledSlugs,
    debug,
    folderFieldName,
  })

  // User-provided overrides may transform the folder collection before it is added.
  if (
    Array.isArray(config?.folders?.collectionOverrides) &&
    config?.folders.collectionOverrides.length
  ) {
    for (const override of config.folders.collectionOverrides) {
      folderCollection = await override({ collection: folderCollection })
    }
  }

  config.collections.push(folderCollection)
}

View File

@@ -1,33 +0,0 @@
import type { SanitizedCollectionConfig } from '../index.js'
import { buildFolderField } from './buildFolderField.js'
export const addFolderFieldToCollection = ({
collection,
collectionSpecific,
folderFieldName,
folderSlug,
}: {
collection: SanitizedCollectionConfig
collectionSpecific: boolean
folderFieldName: string
folderSlug: string
}): void => {
collection.fields.push(
buildFolderField({
collectionSpecific,
folderFieldName,
folderSlug,
overrides: {
admin: {
allowCreate: false,
allowEdit: false,
components: {
Cell: '@payloadcms/ui/rsc#FolderTableCell',
Field: '@payloadcms/ui/rsc#FolderField',
},
},
},
}),
)
}

View File

@@ -1,108 +0,0 @@
import type { SingleRelationshipField } from '../fields/config/types.js'
import type { Document } from '../types/index.js'
import { extractID } from '../utilities/extractID.js'
/**
 * Builds the single-relationship "Folder" field attached to folder-enabled
 * collections (and to the folders collection itself for nesting).
 *
 * When `collectionSpecific` is true, the field's async `validate` checks that
 * the chosen parent folder's `folderType` allows this document's collection
 * (or, for folder documents, allows ALL of the child folder's own types).
 * `overrides` lets callers merge in `admin` settings (components merged shallowly).
 */
export const buildFolderField = ({
  collectionSpecific,
  folderFieldName,
  folderSlug,
  overrides = {},
}: {
  collectionSpecific: boolean
  folderFieldName: string
  folderSlug: string
  overrides?: Partial<SingleRelationshipField>
}): SingleRelationshipField => {
  const field: SingleRelationshipField = {
    name: folderFieldName,
    type: 'relationship',
    admin: {},
    hasMany: false,
    index: true,
    label: 'Folder',
    relationTo: folderSlug,
    validate: async (value, { collectionSlug, data, overrideAccess, previousValue, req }) => {
      if (!collectionSpecific) {
        // if collection scoping is not enabled, no validation required since folders can contain any type of document
        return true
      }
      if (!value) {
        // no folder, no validation required
        return true
      }
      const newID = extractID<Document>(value)
      if (previousValue && extractID<Document>(previousValue) === newID) {
        // value did not change, no validation required
        return true
      } else {
        // need to validate the folder value allows this collection type
        let parentFolder: Document = null
        if (typeof value === 'string' || typeof value === 'number') {
          // need to populate the value with the document
          // NOTE(review): when `value` is an already-populated object, `parentFolder`
          // stays null and validation falls through to the "not found" message below —
          // confirm whether validate can ever receive a populated doc here.
          parentFolder = await req.payload.findByID({
            id: newID,
            collection: folderSlug,
            depth: 0, // no need to populate nested folders
            overrideAccess,
            req,
            select: {
              folderType: true, // only need to check folderType
            },
            user: req.user,
          })
        }
        if (parentFolder && collectionSlug) {
          const parentFolderTypes: string[] = (parentFolder.folderType as string[]) || []
          // if the parent folder has no folder types, it accepts all collections
          if (parentFolderTypes.length === 0) {
            return true
          }
          // validation for a folder document
          if (collectionSlug === folderSlug) {
            // ensure the parent accepts ALL folder types
            const folderTypes: string[] = 'folderType' in data ? (data.folderType as string[]) : []
            const invalidSlugs = folderTypes.filter((validCollectionSlug: string) => {
              return !parentFolderTypes.includes(validCollectionSlug)
            })
            if (invalidSlugs.length === 0) {
              return true
            } else {
              return `Folder with ID ${newID} does not allow documents of type ${invalidSlugs.join(', ')}`
            }
          }
          // validation for a non-folder document
          if (parentFolderTypes.includes(collectionSlug)) {
            return true
          } else {
            return `Folder with ID ${newID} does not allow documents of type ${collectionSlug}`
          }
        } else {
          return `Folder with ID ${newID} not found in collection ${folderSlug}`
        }
      }
    },
  }
  // Merge caller-supplied admin overrides; `components` is merged one level deep
  // so an override can replace Cell/Field without dropping the other.
  if (overrides?.admin) {
    field.admin = {
      ...field.admin,
      ...(overrides.admin || {}),
    }
    if (overrides.admin.components) {
      field.admin.components = {
        ...field.admin.components,
        ...(overrides.admin.components || {}),
      }
    }
  }
  return field
}

View File

@@ -1,129 +1,74 @@
import type { CollectionConfig } from '../collections/config/types.js'
import type { Field, Option, SelectField } from '../fields/config/types.js'
import { defaultAccess } from '../auth/defaultAccess.js'
import { buildFolderField } from './buildFolderField.js'
import { foldersSlug } from './constants.js'
import { populateFolderDataEndpoint } from './endpoints/populateFolderData.js'
import { deleteSubfoldersBeforeDelete } from './hooks/deleteSubfoldersAfterDelete.js'
import { dissasociateAfterDelete } from './hooks/dissasociateAfterDelete.js'
import { ensureSafeCollectionsChange } from './hooks/ensureSafeCollectionsChange.js'
import { reparentChildFolder } from './hooks/reparentChildFolder.js'
type CreateFolderCollectionArgs = {
collectionSpecific: boolean
collectionSlugs: string[]
debug?: boolean
folderEnabledCollections: CollectionConfig[]
folderFieldName: string
slug: string
}
export const createFolderCollection = ({
slug,
collectionSpecific,
collectionSlugs,
debug,
folderEnabledCollections,
folderFieldName,
}: CreateFolderCollectionArgs): CollectionConfig => {
const { collectionOptions, collectionSlugs } = folderEnabledCollections.reduce(
(acc, collection: CollectionConfig) => {
acc.collectionSlugs.push(collection.slug)
acc.collectionOptions.push({
label: collection.labels?.plural || collection.slug,
value: collection.slug,
})
return acc
}: CreateFolderCollectionArgs): CollectionConfig => ({
slug,
admin: {
hidden: !debug,
useAsTitle: 'name',
},
endpoints: [populateFolderDataEndpoint],
fields: [
{
name: 'name',
type: 'text',
index: true,
required: true,
},
{
collectionOptions: [] as Option[],
collectionSlugs: [] as string[],
},
)
return {
slug,
access: {
create: defaultAccess,
delete: defaultAccess,
read: defaultAccess,
readVersions: defaultAccess,
update: defaultAccess,
},
admin: {
hidden: !debug,
useAsTitle: 'name',
},
fields: [
{
name: 'name',
type: 'text',
index: true,
required: true,
name: folderFieldName,
type: 'relationship',
admin: {
hidden: !debug,
},
buildFolderField({
collectionSpecific,
index: true,
relationTo: slug,
},
{
name: 'documentsAndFolders',
type: 'join',
admin: {
hidden: !debug,
},
collection: [slug, ...collectionSlugs],
hasMany: true,
on: folderFieldName,
},
],
hooks: {
afterChange: [
reparentChildFolder({
folderFieldName,
folderSlug: slug,
overrides: {
admin: {
hidden: !debug,
},
},
}),
{
name: 'documentsAndFolders',
type: 'join',
admin: {
hidden: !debug,
},
collection: [slug, ...collectionSlugs],
hasMany: true,
on: folderFieldName,
},
...(collectionSpecific
? [
{
name: 'folderType',
type: 'select',
admin: {
components: {
Field: {
clientProps: {
options: collectionOptions,
},
path: '@payloadcms/ui#FolderTypeField',
},
},
position: 'sidebar',
},
hasMany: true,
options: collectionOptions,
} satisfies SelectField,
]
: ([] as Field[])),
],
hooks: {
afterChange: [
reparentChildFolder({
folderFieldName,
}),
],
afterDelete: [
dissasociateAfterDelete({
collectionSlugs,
folderFieldName,
}),
],
beforeDelete: [deleteSubfoldersBeforeDelete({ folderFieldName, folderSlug: slug })],
beforeValidate: [
...(collectionSpecific ? [ensureSafeCollectionsChange({ foldersSlug })] : []),
],
},
labels: {
plural: 'Folders',
singular: 'Folder',
},
typescript: {
interface: 'FolderInterface',
},
}
}
afterDelete: [
dissasociateAfterDelete({
collectionSlugs,
folderFieldName,
}),
],
beforeDelete: [deleteSubfoldersBeforeDelete({ folderFieldName, folderSlug: slug })],
},
labels: {
plural: 'Folders',
singular: 'Folder',
},
typescript: {
interface: 'FolderInterface',
},
})

View File

@@ -0,0 +1,135 @@
import httpStatus from 'http-status'
import type { Endpoint, Where } from '../../index.js'
import { buildFolderWhereConstraints } from '../utils/buildFolderWhereConstraints.js'
import { getFolderData } from '../utils/getFolderData.js'
/**
 * GET /populate-folder-data
 *
 * Returns breadcrumbs, subfolders and documents for a given folder.
 *
 * Query params:
 * - `collectionSlug` (optional): scope document constraints to one folder-enabled
 *   collection; when omitted, constraints for ALL folder-enabled collections are OR'd.
 * - `folderID` (optional): the folder whose contents are being browsed.
 * - `search`, `sort` (optional): passed through to the constraint builder.
 *
 * Responds 401 without a user, 404 when folders are not configured or the
 * requested collection does not exist.
 */
export const populateFolderDataEndpoint: Endpoint = {
  handler: async (req) => {
    // Folder browsing requires an authenticated user.
    if (!req?.user) {
      return Response.json(
        {
          message: 'Unauthorized request.',
        },
        {
          status: httpStatus.UNAUTHORIZED,
        },
      )
    }

    // Folders must be enabled AND the folder collection must have been registered.
    if (
      !(
        req.payload.config.folders &&
        Boolean(req.payload.collections?.[req.payload.config.folders.slug])
      )
    ) {
      return Response.json(
        {
          message: 'Folders are not configured',
        },
        {
          status: httpStatus.NOT_FOUND,
        },
      )
    }

    // if collectionSlug exists, we need to create constraints for that _specific collection_ and the folder collection
    // if collectionSlug does not exist, we need to create constraints for _all folder enabled collections_ and the folder collection
    let documentWhere: undefined | Where
    let folderWhere: undefined | Where
    const collectionSlug = req.searchParams?.get('collectionSlug')

    if (collectionSlug) {
      const collectionConfig = req.payload.collections?.[collectionSlug]?.config
      if (!collectionConfig) {
        return Response.json(
          {
            message: `Collection with slug "${collectionSlug}" not found`,
          },
          {
            status: httpStatus.NOT_FOUND,
          },
        )
      }
      const collectionConstraints = await buildFolderWhereConstraints({
        collectionConfig,
        folderID: req.searchParams?.get('folderID') || undefined,
        localeCode: typeof req?.locale === 'string' ? req.locale : undefined,
        req,
        search: req.searchParams?.get('search') || undefined,
        sort: req.searchParams?.get('sort') || undefined,
      })

      if (collectionConstraints) {
        documentWhere = collectionConstraints
      }
    } else {
      // loop over all folder enabled collections and build constraints for each
      for (const collectionSlug of Object.keys(req.payload.collections)) {
        const collectionConfig = req.payload.collections[collectionSlug]?.config
        if (collectionConfig?.folders) {
          const collectionConstraints = await buildFolderWhereConstraints({
            collectionConfig,
            folderID: req.searchParams?.get('folderID') || undefined,
            localeCode: typeof req?.locale === 'string' ? req.locale : undefined,
            req,
            search: req.searchParams?.get('search') || undefined,
          })

          if (collectionConstraints) {
            if (!documentWhere) {
              documentWhere = { or: [] }
            }
            // `documentWhere` is only ever created here with an `or` array, so each
            // collection's constraints are OR'd together. (The previous version had a
            // dead `!Array.isArray(documentWhere.or)` branch that would have dropped
            // `collectionConstraints` entirely had it ever been reached.)
            ;(documentWhere.or as Where[]).push(collectionConstraints)
          }
        }
      }
    }

    const folderCollectionConfig =
      req.payload.collections?.[req.payload.config.folders.slug]?.config
    if (!folderCollectionConfig) {
      return Response.json(
        {
          message: 'Folder collection not found',
        },
        {
          status: httpStatus.NOT_FOUND,
        },
      )
    }

    // Constraints for the folder collection itself (the subfolder listing).
    const folderConstraints = await buildFolderWhereConstraints({
      collectionConfig: folderCollectionConfig,
      folderID: req.searchParams?.get('folderID') || undefined,
      localeCode: typeof req?.locale === 'string' ? req.locale : undefined,
      req,
      search: req.searchParams?.get('search') || undefined,
    })

    if (folderConstraints) {
      folderWhere = folderConstraints
    }

    const data = await getFolderData({
      collectionSlug: req.searchParams?.get('collectionSlug') || undefined,
      documentWhere: documentWhere ? documentWhere : undefined,
      folderID: req.searchParams?.get('folderID') || undefined,
      folderWhere,
      req,
    })

    return Response.json(data)
  },
  method: 'get',
  path: '/populate-folder-data',
}

View File

@@ -1,144 +0,0 @@
import { APIError, type CollectionBeforeValidateHook, type CollectionSlug } from '../../index.js'
import { extractID } from '../../utilities/extractID.js'
import { getTranslatedLabel } from '../../utilities/getTranslatedLabel.js'
/**
 * beforeValidate hook factory guarding edits to a folder's `folderType`
 * (the list of collection slugs a folder is scoped to).
 *
 * Two rules are enforced:
 * 1. Narrowing `folderType` is rejected while child documents or child folders
 *    still belong to the collections being removed.
 * 2. Clearing `folderType` (making the folder a catch-all) is rejected when the
 *    parent folder has a `folderType` set, since children must be at least as
 *    scoped as their parent.
 *
 * @throws APIError (400) when either rule is violated.
 */
export const ensureSafeCollectionsChange =
  ({ foldersSlug }: { foldersSlug: CollectionSlug }): CollectionBeforeValidateHook =>
  async ({ data, originalDoc, req }) => {
    // ID of the folder being edited — undefined on create, since there is no originalDoc yet.
    const currentFolderID = extractID(originalDoc || {})
    const parentFolderID = extractID(data?.folder || originalDoc?.folder || {})

    if (Array.isArray(data?.folderType) && data.folderType.length > 0) {
      const folderType = data.folderType as string[]
      const currentlyAssignedCollections: string[] | undefined =
        Array.isArray(originalDoc?.folderType) && originalDoc.folderType.length > 0
          ? originalDoc.folderType
          : undefined

      /**
       * Check if the assigned collections have changed.
       * example:
       *   - originalAssignedCollections: ['posts', 'pages']
       *   - folderType: ['posts']
       *
       * The user is narrowing the types of documents that can be associated with this folder.
       * If the user is only expanding the types of documents that can be associated with this folder,
       * we do not need to do anything.
       */
      const newCollections = currentlyAssignedCollections
        ? // user is narrowing the current scope of the folder
          currentlyAssignedCollections.filter((c) => !folderType.includes(c))
        : // user is adding a scope to the folder
          folderType

      if (newCollections && newCollections.length > 0) {
        // 1) Do any documents in this folder belong to the collections being removed?
        let hasDependentDocuments = false
        if (typeof currentFolderID === 'string' || typeof currentFolderID === 'number') {
          const childDocumentsResult = await req.payload.findByID({
            id: currentFolderID,
            collection: foldersSlug,
            joins: {
              documentsAndFolders: {
                // effectively "no limit" — we only need to know whether any exist
                limit: 100_000_000,
                where: {
                  or: [
                    {
                      relationTo: {
                        in: newCollections,
                      },
                    },
                  ],
                },
              },
            },
            overrideAccess: true,
            req,
          })

          hasDependentDocuments = childDocumentsResult.documentsAndFolders.docs.length > 0
        }

        // 2) matches folders that are directly related to the removed collections
        let hasDependentFolders = false
        if (
          !hasDependentDocuments &&
          (typeof currentFolderID === 'string' || typeof currentFolderID === 'number')
        ) {
          const childFoldersResult = await req.payload.find({
            collection: foldersSlug,
            limit: 1,
            req,
            where: {
              and: [
                {
                  folderType: {
                    in: newCollections,
                  },
                },
                {
                  folder: {
                    equals: currentFolderID,
                  },
                },
              ],
            },
          })

          hasDependentFolders = childFoldersResult.totalDocs > 0
        }

        if (hasDependentDocuments || hasDependentFolders) {
          // Prefer human-readable (translated) collection labels in the error message.
          const translatedLabels = newCollections.map((collectionSlug) => {
            if (req.payload.collections[collectionSlug]?.config.labels.singular) {
              return getTranslatedLabel(
                req.payload.collections[collectionSlug]?.config.labels.plural,
                req.i18n,
              )
            }
            return collectionSlug
          })
          // `originalDoc?.name` (not `originalDoc.name`): originalDoc is undefined on create
          throw new APIError(
            `The folder "${data.name || originalDoc?.name}" contains ${hasDependentDocuments ? 'documents' : 'folders'} that still belong to the following collections: ${translatedLabels.join(', ')}`,
            400,
          )
        }

        return data
      }
    } else if (
      (data?.folderType === null ||
        (Array.isArray(data?.folderType) && data?.folderType.length === 0)) &&
      parentFolderID
    ) {
      // attempting to set the folderType to catch-all, so we need to ensure that the parent allows this
      let parentFolder
      if (typeof parentFolderID === 'string' || typeof parentFolderID === 'number') {
        try {
          parentFolder = await req.payload.findByID({
            id: parentFolderID,
            collection: foldersSlug,
            overrideAccess: true,
            req,
            select: {
              name: true,
              folderType: true,
            },
            user: req.user,
          })
        } catch (_) {
          // parent folder does not exist
        }
      }

      if (
        parentFolder &&
        parentFolder?.folderType &&
        Array.isArray(parentFolder.folderType) &&
        parentFolder.folderType.length > 0
      ) {
        // Fixed: `originalDoc?.name` — this branch IS reachable on create (no
        // originalDoc), where the bare access previously threw a TypeError
        // instead of the intended APIError.
        throw new APIError(
          `The folder "${data?.name || originalDoc?.name}" must have folder-type set since its parent folder ${parentFolder?.name ? `"${parentFolder?.name}" ` : ''}has a folder-type set.`,
          400,
        )
      }
    }

    return data
  }

View File

@@ -10,12 +10,10 @@ export type FolderInterface = {
}[]
}
folder?: FolderInterface | (number | string | undefined)
folderType: CollectionSlug[]
name: string
} & TypeWithID
export type FolderBreadcrumb = {
folderType?: CollectionSlug[]
id: null | number | string
name: string
}
@@ -60,7 +58,6 @@ export type FolderOrDocument = {
_folderOrDocumentTitle: string
createdAt?: string
folderID?: number | string
folderType: CollectionSlug[]
id: number | string
updatedAt?: string
} & DocumentMediaData
@@ -69,7 +66,6 @@ export type FolderOrDocument = {
export type GetFolderDataResult = {
breadcrumbs: FolderBreadcrumb[] | null
documents: FolderOrDocument[]
folderAssignedCollections: CollectionSlug[] | undefined
subfolders: FolderOrDocument[]
}
@@ -89,12 +85,6 @@ export type RootFoldersConfiguration = {
}: {
collection: CollectionConfig
}) => CollectionConfig | Promise<CollectionConfig>)[]
/**
* If true, you can scope folders to specific collections.
*
* @default true
*/
collectionSpecific?: boolean
/**
* Ability to view hidden fields and collections related to folders
*
@@ -124,6 +114,9 @@ export type CollectionFoldersConfiguration = {
browseByFolder?: boolean
}
type BaseFolderSortKeys = 'createdAt' | 'name' | 'updatedAt'
type BaseFolderSortKeys = keyof Pick<
FolderOrDocument['value'],
'_folderOrDocumentTitle' | 'createdAt' | 'updatedAt'
>
export type FolderSortKeys = `-${BaseFolderSortKeys}` | BaseFolderSortKeys

View File

@@ -23,7 +23,6 @@ export function formatFolderOrDocumentItem({
_folderOrDocumentTitle: String((useAsTitle && value?.[useAsTitle]) || value['id']),
createdAt: value?.createdAt,
folderID: value?.[folderFieldName],
folderType: value?.folderType || [],
updatedAt: value?.updatedAt,
}

View File

@@ -27,7 +27,6 @@ export const getFolderBreadcrumbs = async ({
select: {
name: true,
[folderFieldName]: true,
folderType: true,
},
user,
where: {
@@ -43,7 +42,6 @@ export const getFolderBreadcrumbs = async ({
breadcrumbs.push({
id: folder.id,
name: folder.name,
folderType: folder.folderType,
})
if (folder[folderFieldName]) {
return getFolderBreadcrumbs({

View File

@@ -1,6 +1,6 @@
import type { CollectionSlug } from '../../index.js'
import type { PayloadRequest, Where } from '../../types/index.js'
import type { FolderOrDocument, FolderSortKeys, GetFolderDataResult } from '../types.js'
import type { GetFolderDataResult } from '../types.js'
import { parseDocumentID } from '../../index.js'
import { getFolderBreadcrumbs } from './getFolderBreadcrumbs.js'
@@ -29,7 +29,6 @@ type Args = {
*/
folderWhere?: Where
req: PayloadRequest
sort: FolderSortKeys
}
/**
* Query for documents, subfolders and breadcrumbs for a given folder
@@ -40,7 +39,6 @@ export const getFolderData = async ({
folderID: _folderID,
folderWhere,
req,
sort = 'name',
}: Args): Promise<GetFolderDataResult> => {
const { payload } = req
@@ -67,16 +65,15 @@ export const getFolderData = async ({
parentFolderID,
req,
})
const [breadcrumbs, result] = await Promise.all([
const [breadcrumbs, documentsAndSubfolders] = await Promise.all([
breadcrumbsPromise,
documentAndSubfolderPromise,
])
return {
breadcrumbs,
documents: sortDocs({ docs: result.documents, sort }),
folderAssignedCollections: result.folderAssignedCollections,
subfolders: sortDocs({ docs: result.subfolders, sort }),
documents: documentsAndSubfolders.documents,
subfolders: documentsAndSubfolders.subfolders,
}
} else {
// subfolders and documents are queried separately
@@ -99,40 +96,10 @@ export const getFolderData = async ({
subfoldersPromise,
documentsPromise,
])
return {
breadcrumbs,
documents: sortDocs({ docs: documents, sort }),
folderAssignedCollections: collectionSlug ? [collectionSlug] : undefined,
subfolders: sortDocs({ docs: subfolders, sort }),
documents,
subfolders,
}
}
}
function sortDocs({
docs,
sort,
}: {
docs: FolderOrDocument[]
sort?: FolderSortKeys
}): FolderOrDocument[] {
if (!sort) {
return docs
}
const isDesc = typeof sort === 'string' && sort.startsWith('-')
const sortKey = (isDesc ? sort.slice(1) : sort) as FolderSortKeys
return docs.sort((a, b) => {
let result = 0
if (sortKey === 'name') {
result = a.value._folderOrDocumentTitle.localeCompare(b.value._folderOrDocumentTitle)
} else if (sortKey === 'createdAt') {
result =
new Date(a.value.createdAt || '').getTime() - new Date(b.value.createdAt || '').getTime()
} else if (sortKey === 'updatedAt') {
result =
new Date(a.value.updatedAt || '').getTime() - new Date(b.value.updatedAt || '').getTime()
}
return isDesc ? -result : result
})
}

View File

@@ -1,5 +1,4 @@
import type { PaginatedDocs } from '../../database/types.js'
import type { CollectionSlug } from '../../index.js'
import type { Document, PayloadRequest, Where } from '../../types/index.js'
import type { FolderOrDocument } from '../types.js'
@@ -9,7 +8,6 @@ import { formatFolderOrDocumentItem } from './formatFolderOrDocumentItem.js'
type QueryDocumentsAndFoldersResults = {
documents: FolderOrDocument[]
folderAssignedCollections: CollectionSlug[]
subfolders: FolderOrDocument[]
}
type QueryDocumentsAndFoldersArgs = {
@@ -87,9 +85,5 @@ export async function queryDocumentsAndFoldersFromJoin({
},
)
return {
documents: results.documents,
folderAssignedCollections: subfolderDoc?.docs[0]?.folderType || [],
subfolders: results.subfolders,
}
return results
}

View File

@@ -40,7 +40,7 @@ import {
} from './auth/operations/local/verifyEmail.js'
export type { FieldState } from './admin/forms/Form.js'
import type { InitOptions, SanitizedConfig } from './config/types.js'
import type { BaseDatabaseAdapter, PaginatedDistinctDocs, PaginatedDocs } from './database/types.js'
import type { BaseDatabaseAdapter, PaginatedDocs } from './database/types.js'
import type { InitializedEmailAdapter } from './email/types.js'
import type { DataFromGlobalSlug, Globals, SelectFromGlobalSlug } from './globals/config/types.js'
import type {
@@ -72,10 +72,6 @@ import {
findByIDLocal,
type Options as FindByIDOptions,
} from './collections/operations/local/findByID.js'
import {
findDistinct as findDistinctLocal,
type Options as FindDistinctOptions,
} from './collections/operations/local/findDistinct.js'
import {
findVersionByIDLocal,
type Options as FindVersionByIDOptions,
@@ -468,20 +464,6 @@ export class BasePayload {
return findByIDLocal<TSlug, TDisableErrors, TSelect>(this, options)
}
/**
* @description Find distinct field values
* @param options
* @returns result with distinct field values
*/
findDistinct = async <
TSlug extends CollectionSlug,
TField extends keyof DataFromCollectionSlug<TSlug> & string,
>(
options: FindDistinctOptions<TSlug, TField>,
): Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>> => {
return findDistinctLocal(this, options)
}
findGlobal = async <TSlug extends GlobalSlug, TSelect extends SelectFromGlobalSlug<TSlug>>(
options: FindGlobalOptions<TSlug, TSelect>,
): Promise<TransformGlobalWithSelect<TSlug, TSelect>> => {
@@ -854,7 +836,7 @@ export class BasePayload {
throw error
}
if (this.config.jobs.enabled && this.config.jobs.autoRun && !isNextBuild() && options.cron) {
if (this.config.jobs.enabled && this.config.jobs.autoRun && !isNextBuild()) {
const DEFAULT_CRON = '* * * * *'
const DEFAULT_LIMIT = 10
@@ -992,7 +974,7 @@ export const reload = async (
}
export const getPayload = async (
options: Pick<InitOptions, 'config' | 'cron' | 'importMap'>,
options: Pick<InitOptions, 'config' | 'importMap'>,
): Promise<Payload> => {
if (!options?.config) {
throw new Error('Error: the payload config is required for getPayload to work.')
@@ -1127,8 +1109,6 @@ export { generateImportMap } from './bin/generateImportMap/index.js'
export type { ImportMap } from './bin/generateImportMap/index.js'
export { genImportMapIterateFields } from './bin/generateImportMap/iterateFields.js'
export { migrate as migrateCLI } from './bin/migrate.js'
export {
type ClientCollectionConfig,
createClientCollectionConfig,
@@ -1175,6 +1155,7 @@ export type {
} from './collections/config/types.js'
export type { CompoundIndex } from './collections/config/types.js'
export type { SanitizedCompoundIndex } from './collections/config/types.js'
export { createDataloaderCacheKey, getDataLoader } from './collections/dataloader.js'
export { countOperation } from './collections/operations/count.js'
@@ -1190,8 +1171,8 @@ export { findVersionsOperation } from './collections/operations/findVersions.js'
export { restoreVersionOperation } from './collections/operations/restoreVersion.js'
export { updateOperation } from './collections/operations/update.js'
export { updateByIDOperation } from './collections/operations/updateByID.js'
export { buildConfig } from './config/build.js'
export {
type ClientConfig,
createClientConfig,
@@ -1199,8 +1180,8 @@ export {
serverOnlyConfigProperties,
type UnsanitizedClientConfig,
} from './config/client.js'
export { defaults } from './config/defaults.js'
export { defaults } from './config/defaults.js'
export { type OrderableEndpointBody } from './config/orderable/index.js'
export { sanitizeConfig } from './config/sanitize.js'
export type * from './config/types.js'
@@ -1255,7 +1236,6 @@ export type {
Destroy,
Find,
FindArgs,
FindDistinct,
FindGlobal,
FindGlobalArgs,
FindGlobalVersions,
@@ -1269,7 +1249,6 @@ export type {
Migration,
MigrationData,
MigrationTemplateArgs,
PaginatedDistinctDocs,
PaginatedDocs,
QueryDrafts,
QueryDraftsArgs,
@@ -1318,11 +1297,10 @@ export {
ValidationError,
ValidationErrorName,
} from './errors/index.js'
export type { ValidationFieldError } from './errors/index.js'
export { baseBlockFields } from './fields/baseFields/baseBlockFields.js'
export { baseIDField } from './fields/baseFields/baseIDField.js'
export {
createClientField,
createClientFields,
@@ -1330,10 +1308,10 @@ export {
type ServerOnlyFieldProperties,
} from './fields/config/client.js'
export interface FieldCustom extends Record<string, any> {}
export { sanitizeFields } from './fields/config/sanitize.js'
export interface FieldCustom extends Record<string, any> {}
export type {
AdminClient,
ArrayField,
@@ -1443,13 +1421,14 @@ export type {
} from './fields/config/types.js'
export { getDefaultValue } from './fields/getDefaultValue.js'
export { traverseFields as afterChangeTraverseFields } from './fields/hooks/afterChange/traverseFields.js'
export { promise as afterReadPromise } from './fields/hooks/afterRead/promise.js'
export { traverseFields as afterReadTraverseFields } from './fields/hooks/afterRead/traverseFields.js'
export { traverseFields as beforeChangeTraverseFields } from './fields/hooks/beforeChange/traverseFields.js'
export { traverseFields as beforeValidateTraverseFields } from './fields/hooks/beforeValidate/traverseFields.js'
export { sortableFieldTypes } from './fields/sortableFieldTypes.js'
export { validations } from './fields/validations.js'
export type {
ArrayFieldValidation,
@@ -1502,8 +1481,8 @@ export type {
GlobalConfig,
SanitizedGlobalConfig,
} from './globals/config/types.js'
export { docAccessOperation as docAccessOperationGlobal } from './globals/operations/docAccess.js'
export { findOneOperation } from './globals/operations/findOne.js'
export { findVersionByIDOperation as findVersionByIDOperationGlobal } from './globals/operations/findVersionByID.js'
export { findVersionsOperation as findVersionsOperationGlobal } from './globals/operations/findVersions.js'
@@ -1526,8 +1505,8 @@ export type {
} from './preferences/types.js'
export type { QueryPreset } from './query-presets/types.js'
export { jobAfterRead } from './queues/config/index.js'
export type { JobsConfig, RunJobAccess, RunJobAccessArgs } from './queues/config/types/index.js'
export type {
RunInlineTaskFunction,
RunTaskFunction,
@@ -1551,14 +1530,14 @@ export type {
WorkflowHandler,
WorkflowTypes,
} from './queues/config/types/workflowTypes.js'
export { importHandlerPath } from './queues/operations/runJobs/runJob/importHandlerPath.js'
export { getLocalI18n } from './translations/getLocalI18n.js'
export * from './types/index.js'
export { getFileByPath } from './uploads/getFileByPath.js'
export { _internal_safeFetchGlobal } from './uploads/safeFetch.js'
export type * from './uploads/types.js'
export { addDataAndFileToRequest } from './utilities/addDataAndFileToRequest.js'
export { addLocalesToRequestFromData, sanitizeLocales } from './utilities/addLocalesToRequest.js'
export { commitTransaction } from './utilities/commitTransaction.js'
@@ -1630,8 +1609,8 @@ export { versionDefaults } from './versions/defaults.js'
export { deleteCollectionVersions } from './versions/deleteCollectionVersions.js'
export { appendVersionToQueryKey } from './versions/drafts/appendVersionToQueryKey.js'
export { getQueryDraftsSort } from './versions/drafts/getQueryDraftsSort.js'
export { enforceMaxVersions } from './versions/enforceMaxVersions.js'
export { getLatestCollectionVersion } from './versions/getLatestCollectionVersion.js'
export { getLatestGlobalVersion } from './versions/getLatestGlobalVersion.js'
export { saveVersion } from './versions/saveVersion.js'

View File

@@ -121,7 +121,6 @@ export type JobsConfig = {
/**
* A function that will be executed before Payload picks up jobs which are configured by the `jobs.autorun` function.
* If this function returns true, jobs will be queried and picked up. If it returns false, jobs will not be run.
* @default undefined - if this function is not defined, jobs will be run - as if () => true was passed.
* @param payload
* @returns boolean
*/

View File

@@ -1,86 +0,0 @@
import { Where } from '../types/index.js'
import { combineWhereConstraints } from './combineWhereConstraints.js'
// Unit tests for combineWhereConstraints: constraints sharing the target
// `and`/`or` key are merged under it; anything else is pushed into that key.
// Note: callbacks are intentionally NOT async — no awaits are performed
// (the previous `async` markers violated @typescript-eslint/require-await).
describe('combineWhereConstraints', () => {
  it('should merge matching constraint keys', () => {
    const constraint: Where = {
      test: {
        equals: 'value',
      },
    }

    // should merge and queries
    const andConstraint: Where = {
      and: [constraint],
    }
    expect(combineWhereConstraints([andConstraint], 'and')).toEqual(andConstraint)

    // should merge multiple and queries
    expect(combineWhereConstraints([andConstraint, andConstraint], 'and')).toEqual({
      and: [constraint, constraint],
    })

    // should merge or queries
    const orConstraint: Where = {
      or: [constraint],
    }
    expect(combineWhereConstraints([orConstraint], 'or')).toEqual(orConstraint)

    // should merge multiple or queries
    expect(combineWhereConstraints([orConstraint, orConstraint], 'or')).toEqual({
      or: [constraint, constraint],
    })
  })

  it('should push mismatching constraints keys into `as` key', () => {
    const constraint: Where = {
      test: {
        equals: 'value',
      },
    }

    // should push `and` into `or` key
    const andConstraint: Where = {
      and: [constraint],
    }
    expect(combineWhereConstraints([andConstraint], 'or')).toEqual({
      or: [andConstraint],
    })

    // should push `or` into `and` key
    const orConstraint: Where = {
      or: [constraint],
    }
    expect(combineWhereConstraints([orConstraint], 'and')).toEqual({
      and: [orConstraint],
    })

    // should merge `and` but push `or` into `and` key
    expect(combineWhereConstraints([andConstraint, orConstraint], 'and')).toEqual({
      and: [constraint, orConstraint],
    })
  })

  it('should push non and/or constraint key into `as` key', () => {
    const basicConstraint: Where = {
      test: {
        equals: 'value',
      },
    }
    expect(combineWhereConstraints([basicConstraint], 'and')).toEqual({
      and: [basicConstraint],
    })
    expect(combineWhereConstraints([basicConstraint], 'or')).toEqual({
      or: [basicConstraint],
    })
  })

  it('should return an empty object when no constraints are provided', () => {
    expect(combineWhereConstraints([], 'and')).toEqual({})
    expect(combineWhereConstraints([], 'or')).toEqual({})
  })

  it('should return an empty object when all constraints are empty', () => {
    expect(combineWhereConstraints([{}, {}, undefined], 'and')).toEqual({})
    expect(combineWhereConstraints([{}, {}, undefined], 'or')).toEqual({})
  })
})

View File

@@ -8,27 +8,12 @@ export function combineWhereConstraints(
return {}
}
const reducedConstraints = constraints.reduce<Partial<Where>>(
(acc: Partial<Where>, constraint) => {
return {
[as]: constraints.filter((constraint): constraint is Where => {
if (constraint && typeof constraint === 'object' && Object.keys(constraint).length > 0) {
if (as in constraint) {
// merge the objects under the shared key
acc[as] = [...(acc[as] as Where[]), ...(constraint[as] as Where[])]
} else {
// the constraint does not share the key
acc[as]?.push(constraint)
}
return true
}
return acc
},
{ [as]: [] } satisfies Where,
)
if (reducedConstraints[as]?.length === 0) {
// If there are no constraints, return an empty object
return {}
return false
}),
}
return reducedConstraints as Where
}

View File

@@ -27,7 +27,7 @@ export const createPayloadRequest = async ({
request,
}: Args): Promise<PayloadRequest> => {
const cookies = parseCookies(request.headers)
const payload = await getPayload({ config: configPromise, cron: true })
const payload = await getPayload({ config: configPromise })
const { config } = payload
const localization = config.localization

View File

@@ -39,7 +39,7 @@ export const routeError = async ({
if (!payload) {
try {
payload = await getPayload({ config: configArg, cron: true })
payload = await getPayload({ config: configArg })
} catch (ignore) {
return Response.json(
{

View File

@@ -5,7 +5,6 @@ import {
fieldAffectsData,
fieldHasSubFields,
fieldShouldBeLocalized,
tabHasName,
} from '../fields/config/types.js'
const traverseArrayOrBlocksField = ({
@@ -17,7 +16,6 @@ const traverseArrayOrBlocksField = ({
fillEmpty,
leavesFirst,
parentIsLocalized,
parentPath,
parentRef,
}: {
callback: TraverseFieldsCallback
@@ -28,7 +26,6 @@ const traverseArrayOrBlocksField = ({
fillEmpty: boolean
leavesFirst: boolean
parentIsLocalized: boolean
parentPath?: string
parentRef?: unknown
}) => {
if (fillEmpty) {
@@ -41,7 +38,6 @@ const traverseArrayOrBlocksField = ({
isTopLevel: false,
leavesFirst,
parentIsLocalized: parentIsLocalized || field.localized,
parentPath: `${parentPath}${field.name}.`,
parentRef,
})
}
@@ -59,7 +55,6 @@ const traverseArrayOrBlocksField = ({
isTopLevel: false,
leavesFirst,
parentIsLocalized: parentIsLocalized || field.localized,
parentPath: `${parentPath}${field.name}.`,
parentRef,
})
}
@@ -93,7 +88,6 @@ const traverseArrayOrBlocksField = ({
isTopLevel: false,
leavesFirst,
parentIsLocalized: parentIsLocalized || field.localized,
parentPath: `${parentPath}${field.name}.`,
parentRef,
ref,
})
@@ -111,7 +105,6 @@ export type TraverseFieldsCallback = (args: {
*/
next?: () => void
parentIsLocalized: boolean
parentPath: string
/**
* The parent reference object
*/
@@ -137,7 +130,6 @@ type TraverseFieldsArgs = {
*/
leavesFirst?: boolean
parentIsLocalized?: boolean
parentPath?: string
parentRef?: Record<string, unknown> | unknown
ref?: Record<string, unknown> | unknown
}
@@ -160,7 +152,6 @@ export const traverseFields = ({
isTopLevel = true,
leavesFirst = false,
parentIsLocalized,
parentPath = '',
parentRef = {},
ref = {},
}: TraverseFieldsArgs): void => {
@@ -181,19 +172,12 @@ export const traverseFields = ({
if (
!leavesFirst &&
callback &&
callback({ field, next, parentIsLocalized: parentIsLocalized!, parentPath, parentRef, ref })
callback({ field, next, parentIsLocalized: parentIsLocalized!, parentRef, ref })
) {
return true
} else if (leavesFirst) {
callbackStack.push(() =>
callback({
field,
next,
parentIsLocalized: parentIsLocalized!,
parentPath,
parentRef,
ref,
}),
callback({ field, next, parentIsLocalized: parentIsLocalized!, parentRef, ref }),
)
}
@@ -236,7 +220,6 @@ export const traverseFields = ({
field: { ...tab, type: 'tab' },
next,
parentIsLocalized: parentIsLocalized!,
parentPath,
parentRef: currentParentRef,
ref: tabRef,
})
@@ -248,7 +231,6 @@ export const traverseFields = ({
field: { ...tab, type: 'tab' },
next,
parentIsLocalized: parentIsLocalized!,
parentPath,
parentRef: currentParentRef,
ref: tabRef,
}),
@@ -272,7 +254,6 @@ export const traverseFields = ({
isTopLevel: false,
leavesFirst,
parentIsLocalized: true,
parentPath: `${parentPath}${tab.name}.`,
parentRef: currentParentRef,
ref: tabRef[key as keyof typeof tabRef],
})
@@ -287,7 +268,6 @@ export const traverseFields = ({
field: { ...tab, type: 'tab' },
next,
parentIsLocalized: parentIsLocalized!,
parentPath,
parentRef: currentParentRef,
ref: tabRef,
})
@@ -299,7 +279,6 @@ export const traverseFields = ({
field: { ...tab, type: 'tab' },
next,
parentIsLocalized: parentIsLocalized!,
parentPath,
parentRef: currentParentRef,
ref: tabRef,
}),
@@ -317,7 +296,6 @@ export const traverseFields = ({
isTopLevel: false,
leavesFirst,
parentIsLocalized: false,
parentPath: tabHasName(tab) ? `${parentPath}${tab.name}` : parentPath,
parentRef: currentParentRef,
ref: tabRef,
})
@@ -374,7 +352,6 @@ export const traverseFields = ({
isTopLevel: false,
leavesFirst,
parentIsLocalized: true,
parentPath: field.name ? `${parentPath}${field.name}` : parentPath,
parentRef: currentParentRef,
ref: currentRef[key as keyof typeof currentRef],
})
@@ -449,7 +426,6 @@ export const traverseFields = ({
isTopLevel: false,
leavesFirst,
parentIsLocalized,
parentPath,
parentRef: currentParentRef,
ref: currentRef,
})

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-cloud-storage",
"version": "3.48.0",
"version": "3.47.0",
"description": "The official cloud storage plugin for Payload CMS",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-form-builder",
"version": "3.48.0",
"version": "3.47.0",
"description": "Form builder plugin for Payload CMS",
"keywords": [
"payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-import-export",
"version": "3.48.0",
"version": "3.47.0",
"description": "Import-Export plugin for Payload",
"keywords": [
"payload",

View File

@@ -83,24 +83,13 @@ export const reduceFields = ({
(tabFields, tab) => {
if ('fields' in tab) {
const isNamedTab = 'name' in tab && tab.name
const newPath = isNamedTab ? `${path}${path ? '.' : ''}${tab.name}` : path
return [
...tabFields,
...reduceFields({
disabledFields,
fields: tab.fields,
labelPrefix: isNamedTab
? combineLabel({
field: {
name: tab.name,
label: tab.label ?? tab.name,
} as any,
prefix: labelPrefix,
})
: labelPrefix,
path: newPath,
labelPrefix,
path: isNamedTab ? createNestedClientFieldPath(path, field) : path,
}),
]
}
@@ -114,11 +103,7 @@ export const reduceFields = ({
const val = createNestedClientFieldPath(path, field)
// If the field is disabled, skip it
if (
disabledFields.some(
(disabledField) => val === disabledField || val.startsWith(`${disabledField}.`),
)
) {
if (disabledFields.includes(val)) {
return fieldsToUse
}

View File

@@ -18,9 +18,8 @@ import type {
PluginImportExportTranslations,
} from '../../translations/index.js'
import { buildDisabledFieldRegex } from '../../utilities/buildDisabledFieldRegex.js'
import './index.scss'
import { useImportExport } from '../ImportExportProvider/index.js'
import './index.scss'
const baseClass = 'preview'
@@ -47,11 +46,12 @@ export const Preview = () => {
(collection) => collection.slug === collectionSlug,
)
const disabledFieldRegexes: RegExp[] = React.useMemo(() => {
const disabledFieldPaths =
collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields ?? []
return disabledFieldPaths.map(buildDisabledFieldRegex)
const disabledFieldsUnderscored = React.useMemo(() => {
return (
collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields?.map((f: string) =>
f.replace(/\./g, '_'),
) ?? []
)
}, [collectionConfig])
const isCSV = format === 'csv'
@@ -101,16 +101,11 @@ export const Preview = () => {
Array.isArray(fields) && fields.length > 0
? fields.flatMap((field) => {
const regex = fieldToRegex(field)
return allKeys.filter(
(key) =>
regex.test(key) &&
!disabledFieldRegexes.some((disabledRegex) => disabledRegex.test(key)),
)
return allKeys.filter((key) => regex.test(key))
})
: allKeys.filter(
(key) =>
!defaultMetaFields.includes(key) &&
!disabledFieldRegexes.some((regex) => regex.test(key)),
!defaultMetaFields.includes(key) && !disabledFieldsUnderscored.includes(key),
)
const fieldKeys =
@@ -155,7 +150,7 @@ export const Preview = () => {
}, [
collectionConfig,
collectionSlug,
disabledFieldRegexes,
disabledFieldsUnderscored,
draft,
fields,
i18n,

View File

@@ -5,7 +5,6 @@ import { stringify } from 'csv-stringify/sync'
import { APIError } from 'payload'
import { Readable } from 'stream'
import { buildDisabledFieldRegex } from '../utilities/buildDisabledFieldRegex.js'
import { flattenObject } from './flattenObject.js'
import { getCustomFieldFunctions } from './getCustomFieldFunctions.js'
import { getFilename } from './getFilename.js'
@@ -109,22 +108,15 @@ export const createExport = async (args: CreateExportArgs) => {
fields: collectionConfig.flattenedFields,
})
const disabledFields =
const disabledFieldsDot =
collectionConfig.admin?.custom?.['plugin-import-export']?.disabledFields ?? []
const disabledRegexes: RegExp[] = disabledFields.map(buildDisabledFieldRegex)
const disabledFields = disabledFieldsDot.map((f: string) => f.replace(/\./g, '_'))
const filterDisabled = (row: Record<string, unknown>): Record<string, unknown> => {
const filtered: Record<string, unknown> = {}
for (const [key, value] of Object.entries(row)) {
const isDisabled = disabledRegexes.some((regex) => regex.test(key))
if (!isDisabled) {
filtered[key] = value
}
for (const key of disabledFields) {
delete row[key]
}
return filtered
return row
}
if (download) {

Some files were not shown because too many files have changed in this diff Show More