Merge remote-tracking branch 'origin/main' into chore/bump-next-15.4.1

.github/workflows/main.yml (vendored, 3 changes)
@@ -153,6 +153,7 @@ jobs:
       matrix:
         database:
           - mongodb
+          - firestore
           - postgres
           - postgres-custom-schema
           - postgres-uuid
@@ -718,6 +719,8 @@ jobs:
          DO_NOT_TRACK: 1 # Disable Turbopack telemetry

      - name: Analyze esbuild bundle size
+        # Temporarily disable this for community PRs until this can be implemented in a separate workflow
+        if: github.event.pull_request.head.repo.fork == false
        uses: exoego/esbuild-bundle-analyzer@v1
        with:
          metafiles: 'packages/payload/meta_index.json,packages/payload/meta_shared.json,packages/ui/meta_client.json,packages/ui/meta_shared.json,packages/next/meta_index.json,packages/richtext-lexical/meta_client.json'
@@ -30,18 +30,22 @@ export default buildConfig({

## Options

| Option | Description |
| ------ | ----------- |
| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters singular words used as collection `slug`s. |
| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options, which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to Mongoose. |
| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. |
| `disableIndexHints` | Set to `true` to disable hinting to MongoDB to use `id` as an index. This hint is currently applied when counting documents for pagination, as it speeds up that count query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to `false`. |
| `migrationDir` | Customize the directory where migrations are stored. |
| `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/), or `false` to disable the use of transactions. |
| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. The locale defaults to "en". Example: `{ strength: 3 }`. For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). |
| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have data that you want included in the result even though it doesn't exist in Payload, you can set this to `true`. Be careful: Payload access control _won't_ apply to this data. |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `disableFallbackSort` | Set to `true` to disable the fallback sort the adapter adds when sorting by non-unique fields. This can affect performance in some cases, but it ensures a consistent order of results. |
+| `useAlternativeDropDatabase` | Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. Payload only uses `dropDatabase` for testing purposes. Defaults to `false`. |
+| `useBigIntForNumberIDs` | Set to `true` to use `BigInt` for custom ID fields of type `'number'`. Useful for databases that don't support `double` or `int32` IDs. Defaults to `false`. |
+| `useJoinAggregations` | Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. Defaults to `true`. |
+| `usePipelineInSortLookup` | Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. Defaults to `true`. |
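For reference, a minimal sketch of how a couple of these options might be combined in a config (the option values here are illustrative, not recommendations):

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
  db: mongooseAdapter({
    url: process.env.DATABASE_URI,
    // Populate join fields with multiple `find` queries instead of
    // correlated-subquery aggregations (see `useJoinAggregations` above):
    useJoinAggregations: false,
    // Store custom `number` ID fields as BigInt:
    useBigIntForNumberIDs: true,
  }),
})
```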
## Access to Mongoose models

@@ -56,9 +60,21 @@ You can access Mongoose models as follows:

## Using other MongoDB implementations

-Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db):
-
-- For Azure Cosmos DB you must pass `transactionOptions: false` to the adapter options. Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
-- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
-- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, and they are limited there. This may change in the future.
-- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as an index, which can cause problems with DocumentDB.
+You can import the `compatabilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by Payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated):
+
+```ts
+import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb'
+
+export default buildConfig({
+  db: mongooseAdapter({
+    url: process.env.DATABASE_URI,
+    // For example, if you're using Firestore:
+    ...compatabilityOptions.firestore,
+  }),
+})
+```
+
+We export compatibility options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations:
+
+- Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
+- For Azure Cosmos DB, the root config property `indexSortableFields` must be set to `true`.
@@ -194,6 +194,27 @@ const result = await payload.count({
})
```

### FindDistinct#collection-find-distinct

```js
// The result will be an object with:
// {
//   values: ['value-1', 'value-2'], // array of distinct values
//   field: 'title', // the field
//   totalDocs: 10, // count of distinct values satisfying the query
//   perPage: 10, // count of distinct values per page (based on the provided limit)
// }
const result = await payload.findDistinct({
  collection: 'posts', // required
  locale: 'en',
  where: {}, // pass a `where` query here
  user: dummyUser,
  overrideAccess: false,
  field: 'title',
  sort: 'title',
})
```

### Update by ID#collection-update-by-id

```js
@@ -58,7 +58,7 @@ To learn more, see the [Custom Components Performance](../admin/custom-component

### Block references

-Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can can significantly reduce the amount of data sent from the server to the client in the Admin Panel.
+Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc., and can significantly reduce the amount of data sent from the server to the client in the Admin Panel.

For example, if you have a block that is used in multiple fields, you can define it once and reference it in each field, as sketched below.
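A minimal sketch of that pattern, assuming the root-level `blocks` array and the `blockReferences` field property available in recent Payload versions (slugs and field names here are illustrative):

```ts
import type { Block, CollectionConfig } from 'payload'

// Defined once, shared by every field that references it
export const ContentBlock: Block = {
  slug: 'content',
  fields: [{ name: 'body', type: 'textarea' }],
}

export const Pages: CollectionConfig = {
  slug: 'pages',
  fields: [
    {
      name: 'layout',
      type: 'blocks',
      // Reference the shared block by slug instead of inlining its config;
      // the block itself is registered once in the root config's `blocks` array.
      blockReferences: ['content'],
      blocks: [],
    },
  ],
}
```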
@@ -6,6 +6,8 @@ import { anyone } from './access/anyone'
import { checkRole } from './access/checkRole'
import { loginAfterCreate } from './hooks/loginAfterCreate'
import { protectRoles } from './hooks/protectRoles'
+import { access } from 'fs'
+import { create } from 'domain'

export const Users: CollectionConfig = {
  slug: 'users',

@@ -32,6 +34,34 @@ export const Users: CollectionConfig = {
    afterChange: [loginAfterCreate],
  },
  fields: [
    {
      name: 'email',
      type: 'email',
      required: true,
      unique: true,
      access: {
        read: adminsAndUser,
        update: adminsAndUser,
      },
    },
    {
      name: 'password',
      type: 'password',
      required: true,
      admin: {
        description: 'Leave blank to keep the current password.',
      },
    },
    {
      name: 'resetPasswordToken',
      type: 'text',
      hidden: true,
    },
    {
      name: 'resetPasswordExpiration',
      type: 'date',
      hidden: true,
    },
    {
      name: 'firstName',
      type: 'text',

@@ -45,6 +75,11 @@ export const Users: CollectionConfig = {
      type: 'select',
      hasMany: true,
      saveToJWT: true,
      access: {
        read: admins,
        update: admins,
        create: admins,
      },
      hooks: {
        beforeChange: [protectRoles],
      },
@@ -112,6 +112,7 @@
    "test:e2e:prod:ci": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod",
    "test:e2e:prod:ci:noturbo": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod --no-turbo",
    "test:int": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
+   "test:int:firestore": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=firestore DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
    "test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
    "test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
    "test:types": "tstyche",
@@ -36,6 +36,25 @@ export const connect: Connect = async function connect(

  try {
    this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection
+   if (this.useAlternativeDropDatabase) {
+     if (this.connection.db) {
+       // Firestore doesn't support dropDatabase, so we monkey patch
+       // dropDatabase to delete all documents from all collections instead
+       this.connection.db.dropDatabase = async function (): Promise<boolean> {
+         const existingCollections = await this.listCollections().toArray()
+         await Promise.all(
+           existingCollections.map(async (collectionInfo) => {
+             const collection = this.collection(collectionInfo.name)
+             await collection.deleteMany({})
+           }),
+         )
+         return true
+       }
+       this.connection.dropDatabase = async function () {
+         await this.db?.dropDatabase()
+       }
+     }
+   }

    // If we are running a replica set with MongoDB Memory Server,
    // wait until the replica set elects a primary before proceeding
@@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
+import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'

export const find: Find = async function find(

@@ -155,6 +156,16 @@ export const find: Find = async function find(
    result = await Model.paginate(query, paginationOptions)
  }

+ if (!this.useJoinAggregations) {
+   await resolveJoins({
+     adapter: this,
+     collectionSlug,
+     docs: result.docs as Record<string, unknown>[],
+     joins,
+     locale,
+   })
+ }
+
  transform({
    adapter: this,
    data: result.docs,

packages/db-mongodb/src/findDistinct.ts (new file, +141)
@@ -0,0 +1,141 @@
import type { PipelineStage } from 'mongoose'

import { type FindDistinct, getFieldByPath } from 'payload'

import type { MongooseAdapter } from './index.js'

import { buildQuery } from './queries/buildQuery.js'
import { buildSortParam } from './queries/buildSortParam.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'

export const findDistinct: FindDistinct = async function (this: MongooseAdapter, args) {
  const { collectionConfig, Model } = getCollection({
    adapter: this,
    collectionSlug: args.collection,
  })

  const session = await getSession(this, args.req)

  const { where = {} } = args

  const sortAggregation: PipelineStage[] = []

  const sort = buildSortParam({
    adapter: this,
    config: this.payload.config,
    fields: collectionConfig.flattenedFields,
    locale: args.locale,
    sort: args.sort ?? args.field,
    sortAggregation,
    timestamps: true,
  })

  const query = await buildQuery({
    adapter: this,
    collectionSlug: args.collection,
    fields: collectionConfig.flattenedFields,
    locale: args.locale,
    where,
  })

  const fieldPathResult = getFieldByPath({
    fields: collectionConfig.flattenedFields,
    path: args.field,
  })
  let fieldPath = args.field
  if (fieldPathResult?.pathHasLocalized && args.locale) {
    fieldPath = fieldPathResult.localizedPath.replace('<locale>', args.locale)
  }

  const page = args.page || 1

  const sortProperty = Object.keys(sort)[0]! // assert because buildSortParam always returns at least 1 key.
  const sortDirection = sort[sortProperty] === 'asc' ? 1 : -1

  const pipeline: PipelineStage[] = [
    {
      $match: query,
    },
    ...(sortAggregation.length > 0 ? sortAggregation : []),
    {
      $group: {
        _id: {
          _field: `$${fieldPath}`,
          ...(sortProperty === fieldPath
            ? {}
            : {
                _sort: `$${sortProperty}`,
              }),
        },
      },
    },
    {
      $sort: {
        [sortProperty === fieldPath ? '_id._field' : '_id._sort']: sortDirection,
      },
    },
  ]

  const getValues = async () => {
    return Model.aggregate(pipeline, { session }).then((res) =>
      res.map((each) => ({
        [args.field]: JSON.parse(JSON.stringify(each._id._field)),
      })),
    )
  }

  if (args.limit) {
    pipeline.push({
      $skip: (page - 1) * args.limit,
    })
    pipeline.push({ $limit: args.limit })
    const totalDocs = await Model.aggregate(
      [
        {
          $match: query,
        },
        {
          $group: {
            _id: `$${fieldPath}`,
          },
        },
        { $count: 'count' },
      ],
      {
        session,
      },
    ).then((res) => res[0]?.count ?? 0)
    const totalPages = Math.ceil(totalDocs / args.limit)
    const hasPrevPage = page > 1
    const hasNextPage = totalPages > page
    const pagingCounter = (page - 1) * args.limit + 1

    return {
      hasNextPage,
      hasPrevPage,
      limit: args.limit,
      nextPage: hasNextPage ? page + 1 : null,
      page,
      pagingCounter,
      prevPage: hasPrevPage ? page - 1 : null,
      totalDocs,
      totalPages,
      values: await getValues(),
    }
  }

  const values = await getValues()

  return {
    hasNextPage: false,
    hasPrevPage: false,
    limit: 0,
    page: 1,
    pagingCounter: 1,
    totalDocs: values.length,
    totalPages: 1,
    values,
  }
}
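For orientation, roughly the pipeline the code above builds for `field: 'title'` with `sort: 'title'` (a sketch; the `$match` stage holds the compiled `where` query):

```ts
// sortProperty === fieldPath here, so the group key is the field itself
const examplePipeline = [
  { $match: { /* compiled `where` query */ } },
  // one output document per distinct value of `title`
  { $group: { _id: { _field: '$title' } } },
  { $sort: { '_id._field': 1 } },
  // when `limit` is set, $skip and $limit are appended for pagination
]
```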
@@ -10,6 +10,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
+import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'

export const findOne: FindOne = async function findOne(

@@ -67,6 +68,16 @@ export const findOne: FindOne = async function findOne(
    doc = await Model.findOne(query, {}, options)
  }

+ if (doc && !this.useJoinAggregations) {
+   await resolveJoins({
+     adapter: this,
+     collectionSlug,
+     docs: [doc] as Record<string, unknown>[],
+     joins,
+     locale,
+   })
+ }
+
  if (!doc) {
    return null
  }
@@ -42,6 +42,7 @@ import { deleteOne } from './deleteOne.js'
import { deleteVersions } from './deleteVersions.js'
import { destroy } from './destroy.js'
import { find } from './find.js'
+import { findDistinct } from './findDistinct.js'
import { findGlobal } from './findGlobal.js'
import { findGlobalVersions } from './findGlobalVersions.js'
import { findOne } from './findOne.js'

@@ -143,6 +144,29 @@ export interface Args {

  /** The URL to connect to MongoDB or false to start payload and prevent connecting */
  url: false | string
+
+ /**
+  * Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command.
+  * Payload only uses `dropDatabase` for testing purposes.
+  * @default false
+  */
+ useAlternativeDropDatabase?: boolean
+ /**
+  * Set to `true` to use `BigInt` for custom ID fields of type `'number'`.
+  * Useful for databases that don't support `double` or `int32` IDs.
+  * @default false
+  */
+ useBigIntForNumberIDs?: boolean
+ /**
+  * Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries.
+  * @default true
+  */
+ useJoinAggregations?: boolean
+ /**
+  * Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting.
+  * @default true
+  */
+ usePipelineInSortLookup?: boolean
}

export type MongooseAdapter = {

@@ -159,6 +183,10 @@ export type MongooseAdapter = {
    up: (args: MigrateUpArgs) => Promise<void>
  }[]
  sessions: Record<number | string, ClientSession>
+ useAlternativeDropDatabase: boolean
+ useBigIntForNumberIDs: boolean
+ useJoinAggregations: boolean
+ usePipelineInSortLookup: boolean
  versions: {
    [slug: string]: CollectionModel
  }

@@ -194,6 +222,10 @@ declare module 'payload' {
    updateVersion: <T extends TypeWithID = TypeWithID>(
      args: { options?: QueryOptions } & UpdateVersionArgs<T>,
    ) => Promise<TypeWithVersion<T>>
+   useAlternativeDropDatabase: boolean
+   useBigIntForNumberIDs: boolean
+   useJoinAggregations: boolean
+   usePipelineInSortLookup: boolean
    versions: {
      [slug: string]: CollectionModel
    }

@@ -214,6 +246,10 @@ export function mongooseAdapter({
  prodMigrations,
  transactionOptions = {},
  url,
+ useAlternativeDropDatabase = false,
+ useBigIntForNumberIDs = false,
+ useJoinAggregations = true,
+ usePipelineInSortLookup = true,
}: Args): DatabaseAdapterObj {
  function adapter({ payload }: { payload: Payload }) {
    const migrationDir = findMigrationDir(migrationDirArg)

@@ -262,6 +298,7 @@ export function mongooseAdapter({
      destroy,
      disableFallbackSort,
      find,
+     findDistinct,
      findGlobal,
      findGlobalVersions,
      findOne,

@@ -279,6 +316,10 @@ export function mongooseAdapter({
      updateOne,
      updateVersion,
      upsert,
+     useAlternativeDropDatabase,
+     useBigIntForNumberIDs,
+     useJoinAggregations,
+     usePipelineInSortLookup,
    })
  }

@@ -290,6 +331,8 @@ export function mongooseAdapter({
  }
}

+export { compatabilityOptions } from './utilities/compatabilityOptions.js'
+
/**
 * Attempt to find migrations directory.
 *
@@ -143,7 +143,12 @@ export const buildSchema = (args: {
  const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id')
  if (idField) {
    fields = {
-     _id: idField.type === 'number' ? Number : String,
+     _id:
+       idField.type === 'number'
+         ? payload.db.useBigIntForNumberIDs
+           ? mongoose.Schema.Types.BigInt
+           : Number
+         : String,
    }
    schemaFields = schemaFields.filter(
      (field) => !(fieldAffectsData(field) && field.name === 'id'),
    )

@@ -900,7 +905,11 @@ const getRelationshipValueType = (field: RelationshipField | UploadField, payload
  }

  if (customIDType === 'number') {
-   return mongoose.Schema.Types.Number
+   if (payload.db.useBigIntForNumberIDs) {
+     return mongoose.Schema.Types.BigInt
+   } else {
+     return mongoose.Schema.Types.Number
+   }
  }

  return mongoose.Schema.Types.String
@@ -99,31 +99,57 @@ const relationshipSort = ({
    sortFieldPath = foreignFieldPath.localizedPath.replace('<locale>', locale)
  }

- if (
-   !sortAggregation.some((each) => {
-     return '$lookup' in each && each.$lookup.as === `__${path}`
-   })
- ) {
-   sortAggregation.push({
-     $lookup: {
-       as: `__${path}`,
-       foreignField: '_id',
-       from: foreignCollection.Model.collection.name,
-       localField: versions ? `version.${relationshipPath}` : relationshipPath,
-       pipeline: [
-         {
-           $project: {
-             [sortFieldPath]: true,
-           },
-         },
-       ],
-     },
-   })
-
-   sort[`__${path}.${sortFieldPath}`] = sortDirection
-
-   return true
- }
+ const as = `__${relationshipPath.replace(/\./g, '__')}`
+
+ // If we have not already sorted on this relationship yet, we need to add a lookup stage
+ if (!sortAggregation.some((each) => '$lookup' in each && each.$lookup.as === as)) {
+   let localField = versions ? `version.${relationshipPath}` : relationshipPath
+
+   if (adapter.usePipelineInSortLookup) {
+     const flattenedField = `__${localField.replace(/\./g, '__')}_lookup`
+     sortAggregation.push({
+       $addFields: {
+         [flattenedField]: `$${localField}`,
+       },
+     })
+     localField = flattenedField
+   }
+
+   sortAggregation.push({
+     $lookup: {
+       as,
+       foreignField: '_id',
+       from: foreignCollection.Model.collection.name,
+       localField,
+       ...(!adapter.usePipelineInSortLookup && {
+         pipeline: [
+           {
+             $project: {
+               [sortFieldPath]: true,
+             },
+           },
+         ],
+       }),
+     },
+   })
+
+   if (adapter.usePipelineInSortLookup) {
+     sortAggregation.push({
+       $unset: localField,
+     })
+   }
+ }
+
+ if (!adapter.usePipelineInSortLookup) {
+   const lookup = sortAggregation.find(
+     (each) => '$lookup' in each && each.$lookup.as === as,
+   ) as PipelineStage.Lookup
+   const pipeline = lookup.$lookup.pipeline![0] as PipelineStage.Project
+   pipeline.$project[sortFieldPath] = true
+ }
+
+ sort[`${as}.${sortFieldPath}`] = sortDirection
+ return true
  }
}
@@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
+import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'

export const queryDrafts: QueryDrafts = async function queryDrafts(

@@ -158,6 +159,17 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
    result = await Model.paginate(versionQuery, paginationOptions)
  }

+ if (!this.useJoinAggregations) {
+   await resolveJoins({
+     adapter: this,
+     collectionSlug,
+     docs: result.docs as Record<string, unknown>[],
+     joins,
+     locale,
+     versions: true,
+   })
+ }
+
  transform({
    adapter: this,
    data: result.docs,
@@ -1,4 +1,4 @@
-import type { MongooseUpdateQueryOptions } from 'mongoose'
+import type { MongooseUpdateQueryOptions, UpdateQuery } from 'mongoose'
import type { UpdateOne } from 'payload'

import type { MongooseAdapter } from './index.js'

@@ -50,15 +50,20 @@ export const updateOne: UpdateOne = async function updateOne(

  let result

- transform({ adapter: this, data, fields, operation: 'write' })
+ const $inc: Record<string, number> = {}
+ let updateData: UpdateQuery<any> = data
+ transform({ $inc, adapter: this, data, fields, operation: 'write' })
+ if (Object.keys($inc).length) {
+   updateData = { $inc, $set: updateData }
+ }

  try {
    if (returning === false) {
-     await Model.updateOne(query, data, options)
+     await Model.updateOne(query, updateData, options)
      transform({ adapter: this, data, fields, operation: 'read' })
      return null
    } else {
-     result = await Model.findOneAndUpdate(query, data, options)
+     result = await Model.findOneAndUpdate(query, updateData, options)
    }
  } catch (error) {
    handleError({ collection: collectionSlug, error, req })
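To make the new data flow concrete, a hedged sketch of what the `$inc` split produces (field names are illustrative; the extraction itself happens inside `transform`, shown further below):

```ts
// Input `data` for an update, where a number field uses `{ $inc: n }`:
const data = { title: 'Hello', views: { $inc: 1 } }

// `transform` moves the increment into the `$inc` accumulator and removes
// it from `data`, so the final update document sent to MongoDB becomes:
// { $inc: { views: 1 }, $set: { title: 'Hello' } }
```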
@@ -76,7 +76,11 @@ export const aggregatePaginate = async ({
      countPromise = Model.estimatedDocumentCount(query)
    } else {
      const hint = adapter.disableIndexHints !== true ? { _id: 1 } : undefined
-     countPromise = Model.countDocuments(query, { collation, hint, session })
+     countPromise = Model.countDocuments(query, {
+       collation,
+       session,
+       ...(hint ? { hint } : {}),
+     })
    }
  }
@@ -44,6 +44,9 @@ export const buildJoinAggregation = async ({
  projection,
  versions,
}: BuildJoinAggregationArgs): Promise<PipelineStage[] | undefined> => {
+ if (!adapter.useJoinAggregations) {
+   return
+ }
  if (
    (Object.keys(collectionConfig.joins).length === 0 &&
      collectionConfig.polymorphicJoins.length == 0) ||

packages/db-mongodb/src/utilities/compatabilityOptions.ts (new file, +25)
@@ -0,0 +1,25 @@
import type { Args } from '../index.js'

/**
 * Each key is a mongo-compatible database and the value
 * is the recommended `mongooseAdapter` settings for compatability.
 */
export const compatabilityOptions = {
  cosmosdb: {
    transactionOptions: false,
    useJoinAggregations: false,
    usePipelineInSortLookup: false,
  },
  documentdb: {
    disableIndexHints: true,
  },
  firestore: {
    disableIndexHints: true,
    ensureIndexes: false,
    transactionOptions: false,
    useAlternativeDropDatabase: true,
    useBigIntForNumberIDs: true,
    useJoinAggregations: false,
    usePipelineInSortLookup: false,
  },
} satisfies Record<string, Partial<Args>>

packages/db-mongodb/src/utilities/resolveJoins.ts (new file, +647)
@@ -0,0 +1,647 @@
import type { JoinQuery, SanitizedJoins, Where } from 'payload'

import {
  appendVersionToQueryKey,
  buildVersionCollectionFields,
  combineQueries,
  getQueryDraftsSort,
} from 'payload'
import { fieldShouldBeLocalized } from 'payload/shared'

import type { MongooseAdapter } from '../index.js'

import { buildQuery } from '../queries/buildQuery.js'
import { buildSortParam } from '../queries/buildSortParam.js'
import { transform } from './transform.js'

export type ResolveJoinsArgs = {
  /** The MongoDB adapter instance */
  adapter: MongooseAdapter
  /** The slug of the collection being queried */
  collectionSlug: string
  /** Array of documents to resolve joins for */
  docs: Record<string, unknown>[]
  /** Join query specifications (which joins to resolve and how) */
  joins?: JoinQuery
  /** Optional locale for localized queries */
  locale?: string
  /** Optional projection for the join query */
  projection?: Record<string, true>
  /** Whether to resolve versions instead of published documents */
  versions?: boolean
}

/**
 * Resolves join relationships for a collection of documents.
 * This function fetches related documents based on join configurations and
 * attaches them to the original documents with pagination support.
 */
export async function resolveJoins({
  adapter,
  collectionSlug,
  docs,
  joins,
  locale,
  projection,
  versions = false,
}: ResolveJoinsArgs): Promise<void> {
  // Early return if no joins are specified or no documents to process
  if (!joins || docs.length === 0) {
    return
  }

  // Get the collection configuration from the adapter
  const collectionConfig = adapter.payload.collections[collectionSlug]?.config
  if (!collectionConfig) {
    return
  }

  // Build a map of join paths to their configurations for quick lookup
  // This flattens the nested join structure into a single map keyed by join path
  const joinMap: Record<string, { targetCollection: string } & SanitizedJoin> = {}

  // Add regular joins
  for (const [target, joinList] of Object.entries(collectionConfig.joins)) {
    for (const join of joinList) {
      joinMap[join.joinPath] = { ...join, targetCollection: target }
    }
  }

  // Add polymorphic joins
  for (const join of collectionConfig.polymorphicJoins || []) {
    // For polymorphic joins, we use the collections array as the target
    joinMap[join.joinPath] = { ...join, targetCollection: join.field.collection as string }
  }

  // Process each requested join concurrently
  const joinPromises = Object.entries(joins).map(async ([joinPath, joinQuery]) => {
    if (!joinQuery) {
      return null
    }

    // If a projection is provided, and the join path is not in the projection, skip it
    if (projection && !projection[joinPath]) {
      return null
    }

    // Get the join definition from our map
    const joinDef = joinMap[joinPath]
    if (!joinDef) {
      return null
    }

    // Normalize collections to always be an array for unified processing
    const allCollections = Array.isArray(joinDef.field.collection)
      ? joinDef.field.collection
      : [joinDef.field.collection]

    // Use the provided locale or fall back to the default locale for localized fields
    const localizationConfig = adapter.payload.config.localization
    const effectiveLocale =
      locale ||
      (typeof localizationConfig === 'object' &&
        localizationConfig &&
        localizationConfig.defaultLocale)

    // Extract relationTo filter from the where clause to determine which collections to query
    const relationToFilter = extractRelationToFilter(joinQuery.where || {})

    // Determine which collections to query based on relationTo filter
    const collections = relationToFilter
      ? allCollections.filter((col) => relationToFilter.includes(col))
      : allCollections

    // Check if this is a polymorphic collection join (where field.collection is an array)
    const isPolymorphicJoin = Array.isArray(joinDef.field.collection)

    // Apply pagination settings
    const limit = joinQuery.limit ?? joinDef.field.defaultLimit ?? 10
    const page = joinQuery.page ?? 1
    const skip = (page - 1) * limit

    // Process collections concurrently
    const collectionPromises = collections.map(async (joinCollectionSlug) => {
      const targetConfig = adapter.payload.collections[joinCollectionSlug]?.config
      if (!targetConfig) {
        return null
      }

      const useDrafts = versions && Boolean(targetConfig.versions?.drafts)
      let JoinModel
      if (useDrafts) {
        JoinModel = adapter.versions[targetConfig.slug]
      } else {
        JoinModel = adapter.collections[targetConfig.slug]
      }

      if (!JoinModel) {
        return null
      }

      // Extract all parent document IDs to use in the join query
      const parentIDs = docs.map((d) => (versions ? (d.parent ?? d._id ?? d.id) : (d._id ?? d.id)))

      // Build the base query
      let whereQuery: null | Record<string, unknown> = null
      whereQuery = isPolymorphicJoin
        ? filterWhereForCollection(
            joinQuery.where || {},
            targetConfig.flattenedFields,
            true, // exclude relationTo for individual collections
          )
        : joinQuery.where || {}

      // Skip this collection if the WHERE clause cannot be satisfied for polymorphic collection joins
      if (whereQuery === null) {
        return null
      }
      whereQuery = useDrafts
        ? await JoinModel.buildQuery({
            locale,
            payload: adapter.payload,
            where: combineQueries(appendVersionToQueryKey(whereQuery as Where), {
              latest: {
                equals: true,
              },
            }),
          })
        : await buildQuery({
            adapter,
            collectionSlug: joinCollectionSlug,
            fields: targetConfig.flattenedFields,
            locale,
            where: whereQuery as Where,
          })

      // Handle localized paths and version prefixes
      let dbFieldName = joinDef.field.on

      if (effectiveLocale && typeof localizationConfig === 'object' && localizationConfig) {
        const pathSegments = joinDef.field.on.split('.')
        const transformedSegments: string[] = []
        const fields = useDrafts
          ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
          : targetConfig.flattenedFields

        for (let i = 0; i < pathSegments.length; i++) {
          const segment = pathSegments[i]!
          transformedSegments.push(segment)

          // Check if this segment corresponds to a localized field
          const fieldAtSegment = fields.find((f) => f.name === segment)
          if (fieldAtSegment && fieldAtSegment.localized) {
            transformedSegments.push(effectiveLocale)
          }
        }

        dbFieldName = transformedSegments.join('.')
      }

      // Add version prefix for draft queries
      if (useDrafts) {
        dbFieldName = `version.${dbFieldName}`
      }

      // Check if the target field is a polymorphic relationship
      const isPolymorphic = joinDef.targetField
        ? Array.isArray(joinDef.targetField.relationTo)
        : false

      if (isPolymorphic) {
        // For polymorphic relationships, we need to match both relationTo and value
        whereQuery[`${dbFieldName}.relationTo`] = collectionSlug
        whereQuery[`${dbFieldName}.value`] = { $in: parentIDs }
      } else {
        // For regular relationships and polymorphic collection joins
        whereQuery[dbFieldName] = { $in: parentIDs }
      }

      // Build the sort parameters for the query
      const fields = useDrafts
        ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
        : targetConfig.flattenedFields

      const sort = buildSortParam({
        adapter,
        config: adapter.payload.config,
        fields,
        locale,
        sort: useDrafts
          ? getQueryDraftsSort({
              collectionConfig: targetConfig,
              sort: joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
            })
          : joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
        timestamps: true,
      })

      const projection = buildJoinProjection(dbFieldName, useDrafts, sort)

      const [results, dbCount] = await Promise.all([
        JoinModel.find(whereQuery, projection, {
          sort,
          ...(isPolymorphicJoin ? {} : { limit, skip }),
        }).lean(),
        isPolymorphicJoin ? Promise.resolve(0) : JoinModel.countDocuments(whereQuery),
      ])

      const count = isPolymorphicJoin ? results.length : dbCount

      transform({
        adapter,
        data: results,
        fields: useDrafts
          ? buildVersionCollectionFields(adapter.payload.config, targetConfig, false)
          : targetConfig.fields,
        operation: 'read',
      })

      // Return results with collection info for grouping
      return {
        collectionSlug: joinCollectionSlug,
        count,
        dbFieldName,
        results,
        sort,
        useDrafts,
      }
    })

    const collectionResults = await Promise.all(collectionPromises)

    // Group the results by parent ID
    const grouped: Record<
      string,
      {
        docs: Record<string, unknown>[]
        sort: Record<string, string>
      }
    > = {}

    let totalCount = 0
    for (const collectionResult of collectionResults) {
      if (!collectionResult) {
        continue
      }

      const { collectionSlug, count, dbFieldName, results, sort, useDrafts } = collectionResult

      totalCount += count

      for (const result of results) {
        if (useDrafts) {
          result.id = result.parent
        }

        const parentValues = getByPathWithArrays(result, dbFieldName) as (
          | { relationTo: string; value: number | string }
          | number
          | string
        )[]

        if (parentValues.length === 0) {
          continue
        }

        for (let parentValue of parentValues) {
          if (!parentValue) {
            continue
          }

          if (typeof parentValue === 'object') {
            parentValue = parentValue.value
          }

          const joinData = {
            relationTo: collectionSlug,
            value: result.id,
          }

          const parentKey = parentValue as string
          if (!grouped[parentKey]) {
            grouped[parentKey] = {
              docs: [],
              sort,
            }
          }

          // Always store the ObjectID reference in polymorphic format
          grouped[parentKey].docs.push({
            ...result,
            __joinData: joinData,
          })
        }
      }
    }

    for (const results of Object.values(grouped)) {
      results.docs.sort((a, b) => {
        for (const [fieldName, sortOrder] of Object.entries(results.sort)) {
          const sort = sortOrder === 'asc' ? 1 : -1
          const aValue = a[fieldName] as Date | number | string
          const bValue = b[fieldName] as Date | number | string
          if (aValue < bValue) {
            return -1 * sort
          }
          if (aValue > bValue) {
            return 1 * sort
          }
        }
        return 0
      })
      results.docs = results.docs.map(
        (doc) => (isPolymorphicJoin ? doc.__joinData : doc.id) as Record<string, unknown>,
      )
    }

    // Determine if the join field should be localized
    const localeSuffix =
      fieldShouldBeLocalized({
        field: joinDef.field,
        parentIsLocalized: joinDef.parentIsLocalized,
      }) &&
      adapter.payload.config.localization &&
      effectiveLocale
        ? `.${effectiveLocale}`
        : ''

    // Adjust the join path with locale suffix if needed
    const localizedJoinPath = `${joinPath}${localeSuffix}`

    return {
      grouped,
      isPolymorphicJoin,
      joinQuery,
      limit,
      localizedJoinPath,
      page,
      skip,
      totalCount,
    }
  })

  // Wait for all join operations to complete
  const joinResults = await Promise.all(joinPromises)

  // Process the results and attach them to documents
  for (const joinResult of joinResults) {
    if (!joinResult) {
      continue
    }

    const { grouped, isPolymorphicJoin, joinQuery, limit, localizedJoinPath, skip, totalCount } =
      joinResult

    // Attach the joined data to each parent document
    for (const doc of docs) {
      const id = (versions ? (doc.parent ?? doc._id ?? doc.id) : (doc._id ?? doc.id)) as string
      const all = grouped[id]?.docs || []

      // Calculate the slice for pagination
      // When limit is 0, it means unlimited - return all results
      const slice = isPolymorphicJoin
        ? limit === 0
          ? all
          : all.slice(skip, skip + limit)
        : // For non-polymorphic joins, we assume that page and limit were applied at the database level
          all

      // Create the join result object with pagination metadata
      const value: Record<string, unknown> = {
        docs: slice,
        hasNextPage: limit === 0 ? false : totalCount > skip + slice.length,
      }

      // Include total count if requested
      if (joinQuery.count) {
        value.totalDocs = totalCount
      }

      // Navigate to the correct nested location in the document and set the join data
      // This handles nested join paths like "user.posts" by creating intermediate objects
      const segments = localizedJoinPath.split('.')
      let ref: Record<string, unknown>
      if (versions) {
        if (!doc.version) {
          doc.version = {}
        }
        ref = doc.version as Record<string, unknown>
      } else {
        ref = doc
      }

      for (let i = 0; i < segments.length - 1; i++) {
        const seg = segments[i]!
        if (!ref[seg]) {
          ref[seg] = {}
        }
        ref = ref[seg] as Record<string, unknown>
      }
      // Set the final join data at the target path
      ref[segments[segments.length - 1]!] = value
    }
  }
}

/**
 * Extracts relationTo filter values from a WHERE clause
 * @param where - The WHERE clause to search
 * @returns Array of collection slugs if a relationTo filter is found, null otherwise
 */
function extractRelationToFilter(where: Record<string, unknown>): null | string[] {
  if (!where || typeof where !== 'object') {
    return null
  }

  // Check for direct relationTo conditions
  if (where.relationTo && typeof where.relationTo === 'object') {
    const relationTo = where.relationTo as Record<string, unknown>
    if (relationTo.in && Array.isArray(relationTo.in)) {
      return relationTo.in as string[]
    }
    if (relationTo.equals) {
      return [relationTo.equals as string]
    }
  }

  // Check for relationTo in logical operators
  if (where.and && Array.isArray(where.and)) {
    for (const condition of where.and) {
      const result = extractRelationToFilter(condition)
      if (result) {
        return result
      }
    }
  }

  if (where.or && Array.isArray(where.or)) {
    for (const condition of where.or) {
      const result = extractRelationToFilter(condition)
      if (result) {
        return result
      }
    }
  }

  return null
}

/**
 * Filters a WHERE clause to only include fields that exist in the target collection.
 * This is needed for polymorphic joins where different collections have different fields.
 * @param where - The original WHERE clause
 * @param availableFields - The fields available in the target collection
 * @param excludeRelationTo - Whether to exclude the relationTo field (for individual collections)
 * @returns A filtered WHERE clause, or null if the query cannot match this collection
 */
function filterWhereForCollection(
  where: Record<string, unknown>,
  availableFields: Array<{ name: string }>,
  excludeRelationTo: boolean = false,
): null | Record<string, unknown> {
  if (!where || typeof where !== 'object') {
    return where
  }

  const fieldNames = new Set(availableFields.map((f) => f.name))
  // Add special fields that are available in polymorphic relationships
  if (!excludeRelationTo) {
    fieldNames.add('relationTo')
  }

  const filtered: Record<string, unknown> = {}

  for (const [key, value] of Object.entries(where)) {
    if (key === 'and') {
      // Handle AND operator - all conditions must be satisfiable
      if (Array.isArray(value)) {
        const filteredConditions: Record<string, unknown>[] = []

        for (const condition of value) {
          const filteredCondition = filterWhereForCollection(
            condition,
            availableFields,
            excludeRelationTo,
          )

          // If any condition in AND cannot be satisfied, the whole AND fails
          if (filteredCondition === null) {
            return null
          }

          if (Object.keys(filteredCondition).length > 0) {
            filteredConditions.push(filteredCondition)
          }
        }

        if (filteredConditions.length > 0) {
          filtered[key] = filteredConditions
        }
      }
    } else if (key === 'or') {
      // Handle OR operator - at least one condition must be satisfiable
      if (Array.isArray(value)) {
        const filteredConditions = value
          .map((condition) =>
            filterWhereForCollection(condition, availableFields, excludeRelationTo),
          )
          .filter((condition) => condition !== null && Object.keys(condition).length > 0)

        if (filteredConditions.length > 0) {
          filtered[key] = filteredConditions
        }
        // If no OR conditions can be satisfied, we still continue (OR is more permissive)
      }
    } else if (key === 'relationTo' && excludeRelationTo) {
      // Skip relationTo field for non-polymorphic collections
      continue
    } else if (fieldNames.has(key)) {
      // Include the condition if the field exists in this collection
      filtered[key] = value
    } else {
      // Field doesn't exist in this collection - this makes the query unsatisfiable
      return null
    }
  }

  return filtered
}

type SanitizedJoin = SanitizedJoins[string][number]

/**
 * Builds the projection for join queries
 */
function buildJoinProjection(
  baseFieldName: string,
  useDrafts: boolean,
  sort: Record<string, string>,
): Record<string, 1> {
  const projection: Record<string, 1> = {
    _id: 1,
    [baseFieldName]: 1,
  }

  if (useDrafts) {
    projection.parent = 1
  }

  for (const fieldName of Object.keys(sort)) {
    projection[fieldName] = 1
  }

  return projection
}

/**
 * Enhanced utility function to safely traverse nested object properties using dot notation.
 * Handles arrays by searching through array elements for matching values.
 * @param doc - The document to traverse
 * @param path - Dot-separated path (e.g., "array.category")
 * @returns Array of values found at the specified path (for arrays) or a single value
 */
function getByPathWithArrays(doc: unknown, path: string): unknown[] {
  const segments = path.split('.')
  let current = doc

  for (let i = 0; i < segments.length; i++) {
    const segment = segments[i]!

    if (current === undefined || current === null) {
      return []
    }

    // Get the value at the current segment
    const value = (current as Record<string, unknown>)[segment]

    if (value === undefined || value === null) {
      return []
    }

    // If this is the last segment, return the value(s)
    if (i === segments.length - 1) {
      return Array.isArray(value) ? value : [value]
    }

    // If the value is an array and we have more segments to traverse
    if (Array.isArray(value)) {
      const remainingPath = segments.slice(i + 1).join('.')
      const results: unknown[] = []

      // Search through each array element
      for (const item of value) {
        if (item && typeof item === 'object') {
          const subResults = getByPathWithArrays(item, remainingPath)
          results.push(...subResults)
        }
      }

      return results
    }

    // Continue traversing
    current = value
  }

  return []
}
@@ -208,6 +208,7 @@ const sanitizeDate = ({
}

type Args = {
+ $inc?: Record<string, number>
  /** instance of the adapter */
  adapter: MongooseAdapter
  /** data to transform, can be an array of documents or a single document */

@@ -396,6 +397,7 @@ const stripFields = ({
}

export const transform = ({
+ $inc,
  adapter,
  data,
  fields,

@@ -406,7 +408,7 @@ export const transform = ({
}: Args) => {
  if (Array.isArray(data)) {
    for (const item of data) {
-     transform({ adapter, data: item, fields, globalSlug, operation, validateRelationships })
+     transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships })
    }
    return
  }

@@ -424,6 +426,11 @@ export const transform = ({
    data.id = data.id.toHexString()
  }

+ // Handle BigInt conversion for custom ID fields of type 'number'
+ if (adapter.useBigIntForNumberIDs && typeof data.id === 'bigint') {
+   data.id = Number(data.id)
+ }
+
  if (!adapter.allowAdditionalKeys) {
    stripFields({
      config,

@@ -438,13 +445,27 @@ export const transform = ({
    data.globalType = globalSlug
  }

- const sanitize: TraverseFieldsCallback = ({ field, ref: incomingRef }) => {
+ const sanitize: TraverseFieldsCallback = ({ field, parentPath, ref: incomingRef }) => {
    if (!incomingRef || typeof incomingRef !== 'object') {
      return
    }

    const ref = incomingRef as Record<string, unknown>

+   if (
+     $inc &&
+     field.type === 'number' &&
+     operation === 'write' &&
+     field.name in ref &&
+     ref[field.name]
+   ) {
+     const value = ref[field.name]
+     if (value && typeof value === 'object' && '$inc' in value && typeof value.$inc === 'number') {
+       $inc[`${parentPath}${field.name}`] = value.$inc
+       delete ref[field.name]
+     }
+   }
+
    if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) {
      if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
        const fieldRef = ref[field.name] as Record<string, unknown>
@@ -17,6 +17,7 @@ import {
  deleteVersions,
  destroy,
  find,
+ findDistinct,
  findGlobal,
  findGlobalVersions,
  findMigrationDir,

@@ -120,6 +121,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
      json: true,
    },
    fieldConstraints: {},
+   findDistinct,
    generateSchema: createSchemaGenerator({
      columnToCodeConverter,
      corePackageSuffix: 'pg-core',
@@ -6,13 +6,13 @@ import type { CountDistinct, SQLiteAdapter } from './types.js'

export const countDistinct: CountDistinct = async function countDistinct(
  this: SQLiteAdapter,
- { db, joins, tableName, where },
+ { column, db, joins, tableName, where },
) {
  // When we don't have any joins - use a simple COUNT(*) query.
  if (joins.length === 0) {
    const countResult = await db
      .select({
-       count: count(),
+       count: column ? count(sql`DISTINCT ${column}`) : count(),
      })
      .from(this.tables[tableName])
      .where(where)

@@ -25,7 +25,7 @@ export const countDistinct: CountDistinct = async function countDistinct(
    })
    .from(this.tables[tableName])
    .where(where)
-   .groupBy(this.tables[tableName].id)
+   .groupBy(column ?? this.tables[tableName].id)
    .limit(1)
    .$dynamic()
@@ -18,6 +18,7 @@ import {
  deleteVersions,
  destroy,
  find,
+ findDistinct,
  findGlobal,
  findGlobalVersions,
  findMigrationDir,

@@ -101,6 +102,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
      json: true,
    },
    fieldConstraints: {},
+   findDistinct,
    generateSchema: createSchemaGenerator({
      columnToCodeConverter,
      corePackageSuffix: 'sqlite-core',
@@ -5,6 +5,7 @@ import type { DrizzleConfig, Relation, Relations, SQL } from 'drizzle-orm'
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type {
  AnySQLiteColumn,
  SQLiteColumn,
  SQLiteInsertOnConflictDoUpdateConfig,
  SQLiteTableWithColumns,
  SQLiteTransactionConfig,
@@ -87,6 +88,7 @@ export type GenericTable = SQLiteTableWithColumns<{
export type GenericRelation = Relations<string, Record<string, Relation<string>>>

export type CountDistinct = (args: {
  column?: SQLiteColumn<any>
  db: LibSQLDatabase
  joins: BuildQueryJoinAliases
  tableName: string
@@ -18,6 +18,7 @@ import {
  deleteVersions,
  destroy,
  find,
  findDistinct,
  findGlobal,
  findGlobalVersions,
  findMigrationDir,
@@ -174,6 +175,7 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<VercelPostgresAdapter> {
    dropDatabase,
    execute,
    find,
    findDistinct,
    findGlobal,
    findGlobalVersions,
    readReplicaOptions: args.readReplicas,
108 packages/drizzle/src/findDistinct.ts Normal file
@@ -0,0 +1,108 @@
import type { FindDistinct, SanitizedCollectionConfig } from 'payload'

import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter, GenericColumn } from './types.js'

import { buildQuery } from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { getTransaction } from './utilities/getTransaction.js'
import { DistinctSymbol } from './utilities/rawConstraint.js'

export const findDistinct: FindDistinct = async function (this: DrizzleAdapter, args) {
  const db = await getTransaction(this, args.req)
  const collectionConfig: SanitizedCollectionConfig =
    this.payload.collections[args.collection].config
  const page = args.page || 1
  const offset = args.limit ? (page - 1) * args.limit : undefined
  const tableName = this.tableNameMap.get(toSnakeCase(collectionConfig.slug))

  const { joins, orderBy, selectFields, where } = buildQuery({
    adapter: this,
    fields: collectionConfig.flattenedFields,
    locale: args.locale,
    sort: args.sort ?? args.field,
    tableName,
    where: {
      and: [
        args.where ?? {},
        {
          [args.field]: {
            equals: DistinctSymbol,
          },
        },
      ],
    },
  })

  orderBy.pop()

  const selectDistinctResult = await selectDistinct({
    adapter: this,
    db,
    forceRun: true,
    joins,
    query: ({ query }) => {
      query = query.orderBy(() => orderBy.map(({ column, order }) => order(column)))

      if (args.limit) {
        if (offset) {
          query = query.offset(offset)
        }

        query = query.limit(args.limit)
      }

      return query
    },
    selectFields: {
      _selected: selectFields['_selected'],
      ...(orderBy[0].column === selectFields['_selected'] ? {} : { _order: orderBy[0].column }),
    } as Record<string, GenericColumn>,
    tableName,
    where,
  })

  const values = selectDistinctResult.map((each) => ({
    [args.field]: (each as Record<string, any>)._selected,
  }))

  if (args.limit) {
    const totalDocs = await this.countDistinct({
      column: selectFields['_selected'],
      db,
      joins,
      tableName,
      where,
    })

    const totalPages = Math.ceil(totalDocs / args.limit)
    const hasPrevPage = page > 1
    const hasNextPage = totalPages > page
    const pagingCounter = (page - 1) * args.limit + 1

    return {
      hasNextPage,
      hasPrevPage,
      limit: args.limit,
      nextPage: hasNextPage ? page + 1 : null,
      page,
      pagingCounter,
      prevPage: hasPrevPage ? page - 1 : null,
      totalDocs,
      totalPages,
      values,
    }
  }

  return {
    hasNextPage: false,
    hasPrevPage: false,
    limit: 0,
    page: 1,
    pagingCounter: 1,
    totalDocs: values.length,
    totalPages: 1,
    values,
  }
}
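A worked example of the pagination math above, with invented numbers: 23 distinct values, a limit of 10, requesting page 3.

// Invented numbers, checking the arithmetic used in findDistinct above:
const totalDocs = 23
const limit = 10
const page = 3

const totalPages = Math.ceil(totalDocs / limit) // 3
const hasPrevPage = page > 1 // true
const hasNextPage = totalPages > page // false — page 3 is the last page
const pagingCounter = (page - 1) * limit + 1 // 21, the 1-based index of the first value on this page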
@@ -12,6 +12,7 @@ export { deleteVersions } from './deleteVersions.js'
export { destroy } from './destroy.js'
export { find } from './find.js'
export { chainMethods } from './find/chainMethods.js'
export { findDistinct } from './findDistinct.js'
export { findGlobal } from './findGlobal.js'
export { findGlobalVersions } from './findGlobalVersions.js'
export { findMigrationDir } from './findMigrationDir.js'
@@ -6,13 +6,13 @@ import type { BasePostgresAdapter, CountDistinct } from './types.js'

export const countDistinct: CountDistinct = async function countDistinct(
  this: BasePostgresAdapter,
  { db, joins, tableName, where },
  { column, db, joins, tableName, where },
) {
  // When we don't have any joins - use a simple COUNT(*) query.
  if (joins.length === 0) {
    const countResult = await db
      .select({
        count: count(),
        count: column ? count(sql`DISTINCT ${column}`) : count(),
      })
      .from(this.tables[tableName])
      .where(where)
@@ -26,7 +26,7 @@ export const countDistinct: CountDistinct = async function countDistinct(
      })
      .from(this.tables[tableName])
      .where(where)
      .groupBy(this.tables[tableName].id)
      .groupBy(column || this.tables[tableName].id)
      .limit(1)
      .$dynamic()
@@ -20,6 +20,7 @@ import type {
  UniqueConstraintBuilder,
} from 'drizzle-orm/pg-core'
import type { PgTableFn } from 'drizzle-orm/pg-core/table'
import type { SQLiteColumn } from 'drizzle-orm/sqlite-core'
import type { Payload, PayloadRequest } from 'payload'
import type { ClientConfig, QueryResult } from 'pg'

@@ -64,6 +65,7 @@ export type GenericRelation = Relations<string, Record<string, Relation<string>>>
export type PostgresDB = NodePgDatabase<Record<string, unknown>>

export type CountDistinct = (args: {
  column?: PgColumn<any> | SQLiteColumn<any>
  db: PostgresDB | TransactionPg
  joins: BuildQueryJoinAliases
  tableName: string
@@ -10,6 +10,7 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js'
import type { BuildQueryJoinAliases } from './buildQuery.js'

import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
import { DistinctSymbol } from '../utilities/rawConstraint.js'
import { buildAndOrConditions } from './buildAndOrConditions.js'
import { getTableColumnFromPath } from './getTableColumnFromPath.js'
import { sanitizeQueryValue } from './sanitizeQueryValue.js'
@@ -108,6 +109,17 @@ export function parseParams({
    value: val,
  })

  const resolvedColumn =
    rawColumn ||
    (aliasTable && tableName === getNameFromDrizzleTable(table)
      ? aliasTable[columnName]
      : table[columnName])

  if (val === DistinctSymbol) {
    selectFields['_selected'] = resolvedColumn
    break
  }

  queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
    if (typeof value === 'string' && value.indexOf('%') > -1) {
      constraints.push(adapter.operators.like(constraintTable[col], value))
@@ -281,12 +293,6 @@ export function parseParams({
    break
  }

  const resolvedColumn =
    rawColumn ||
    (aliasTable && tableName === getNameFromDrizzleTable(table)
      ? aliasTable[columnName]
      : table[columnName])

  if (queryOperator === 'not_equals' && queryValue !== null) {
    constraints.push(
      or(
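The `DistinctSymbol` check above is a sentinel pattern: `findDistinct` injects the symbol as a fake `equals` value so the target field flows through the normal where-clause machinery, and `parseParams` intercepts it to record the resolved column as `_selected` without emitting a real constraint. A minimal, self-contained sketch of the idea — all names here are illustrative, not the real builder:

// Sentinel pattern sketch: a unique symbol travels through a generic query
// builder and is intercepted to capture metadata instead of a constraint.
const Distinct = Symbol('Distinct')

type Constraint = { column: string; equals: unknown }

const buildConstraints = (where: Record<string, unknown>) => {
  const constraints: Constraint[] = []
  const selected: string[] = []

  for (const [column, value] of Object.entries(where)) {
    if (value === Distinct) {
      selected.push(column) // capture the column, emit no constraint
      continue
    }
    constraints.push({ column, equals: value })
  }

  return { constraints, selected }
}

// { constraints: [{ column: 'status', equals: 'published' }], selected: ['category'] }
console.log(buildConstraints({ category: Distinct, status: 'published' }))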
@@ -14,6 +14,7 @@ import type { BuildQueryJoinAliases } from './buildQuery.js'
type Args = {
  adapter: DrizzleAdapter
  db: DrizzleAdapter['drizzle'] | DrizzleTransaction
  forceRun?: boolean
  joins: BuildQueryJoinAliases
  query?: (args: { query: SQLiteSelect }) => SQLiteSelect
  selectFields: Record<string, GenericColumn>
@@ -27,13 +28,14 @@ type Args = {
export const selectDistinct = ({
  adapter,
  db,
  forceRun,
  joins,
  query: queryModifier = ({ query }) => query,
  selectFields,
  tableName,
  where,
}: Args): QueryPromise<{ id: number | string }[] & Record<string, GenericColumn>> => {
  if (Object.keys(joins).length > 0) {
  if (forceRun || Object.keys(joins).length > 0) {
    let query: SQLiteSelect
    const table = adapter.tables[tableName]
@@ -8,6 +8,7 @@ import { traverseFields } from './traverseFields.js'
type Args = {
  adapter: DrizzleAdapter
  data: Record<string, unknown>
  enableAtomicWrites?: boolean
  fields: FlattenedField[]
  parentIsLocalized?: boolean
  path?: string
@@ -17,6 +18,7 @@ type Args = {
export const transformForWrite = ({
  adapter,
  data,
  enableAtomicWrites,
  fields,
  parentIsLocalized,
  path = '',
@@ -48,6 +50,7 @@ export const transformForWrite = ({
    blocksToDelete: rowToInsert.blocksToDelete,
    columnPrefix: '',
    data,
    enableAtomicWrites,
    fieldPrefix: '',
    fields,
    locales: rowToInsert.locales,
@@ -1,6 +1,5 @@
import type { FlattenedField } from 'payload'

import { sql } from 'drizzle-orm'
import { APIError, type FlattenedField } from 'payload'
import { fieldIsVirtual, fieldShouldBeLocalized } from 'payload/shared'
import toSnakeCase from 'to-snake-case'

@@ -41,6 +40,7 @@ type Args = {
   */
  columnPrefix: string
  data: Record<string, unknown>
  enableAtomicWrites?: boolean
  existingLocales?: Record<string, unknown>[]
  /**
   * A prefix that will retain camel-case formatting, representing prior fields
@@ -87,6 +87,7 @@ export const traverseFields = ({
  blocksToDelete,
  columnPrefix,
  data,
  enableAtomicWrites,
  existingLocales,
  fieldPrefix,
  fields,
@@ -268,6 +269,7 @@ export const traverseFields = ({
    blocksToDelete,
    columnPrefix: `${columnName}_`,
    data: localeData as Record<string, unknown>,
    enableAtomicWrites,
    existingLocales,
    fieldPrefix: `${fieldName}_`,
    fields: field.flattenedFields,
@@ -553,6 +555,22 @@ export const traverseFields = ({
    formattedValue = JSON.stringify(value)
  }

  if (
    field.type === 'number' &&
    value &&
    typeof value === 'object' &&
    '$inc' in value &&
    typeof value.$inc === 'number'
  ) {
    if (!enableAtomicWrites) {
      throw new APIError(
        'The passed data must not contain any nested fields for atomic writes',
      )
    }

    formattedValue = sql.raw(`${columnName} + ${value.$inc}`)
  }

  if (field.type === 'date') {
    if (typeof value === 'number' && !Number.isNaN(value)) {
      formattedValue = new Date(value).toISOString()
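On the relational side, the `$inc` value never touches JavaScript arithmetic: the column is set to a raw SQL expression, so the database performs the increment atomically. A sketch of the expression built above, with a hypothetical column name:

import { sql } from 'drizzle-orm'

// For data { views: { $inc: 3 } } on a column named "views", the write
// transform sets the column value to a raw expression, so the eventual
// UPDATE reads roughly: SET views = views + 3.
const columnName = 'views' // hypothetical column
const inc = 3
const formattedValue = sql.raw(`${columnName} + ${inc}`)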
@@ -89,6 +89,7 @@ export type TransactionPg = PgTransaction<
export type DrizzleTransaction = TransactionPg | TransactionSQLite

export type CountDistinct = (args: {
  column?: PgColumn<any> | SQLiteColumn<any>
  db: DrizzleTransaction | LibSQLDatabase | PostgresDB
  joins: BuildQueryJoinAliases
  tableName: string
@@ -1,67 +1,15 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type { FlattenedField, UpdateOne } from 'payload'
import type { UpdateOne } from 'payload'

import { eq } from 'drizzle-orm'
import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter } from './types.js'

import { buildFindManyArgs } from './find/buildFindManyArgs.js'
import { buildQuery } from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { transform } from './transform/read/index.js'
import { transformForWrite } from './transform/write/index.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'

/**
 * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call.
 * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships.
 */
const shouldUseUpsertRow = ({
  data,
  fields,
}: {
  data: Record<string, unknown>
  fields: FlattenedField[]
}) => {
  for (const key in data) {
    const value = data[key]
    const field = fields.find((each) => each.name === key)

    if (!field) {
      continue
    }

    if (
      field.type === 'array' ||
      field.type === 'blocks' ||
      ((field.type === 'text' ||
        field.type === 'relationship' ||
        field.type === 'upload' ||
        field.type === 'select' ||
        field.type === 'number') &&
        field.hasMany) ||
      ((field.type === 'relationship' || field.type === 'upload') &&
        Array.isArray(field.relationTo)) ||
      field.localized
    ) {
      return true
    }

    if (
      (field.type === 'group' || field.type === 'tab') &&
      value &&
      typeof value === 'object' &&
      shouldUseUpsertRow({ data: value as Record<string, unknown>, fields: field.flattenedFields })
    ) {
      return true
    }
  }

  return false
}

export const updateOne: UpdateOne = async function updateOne(
  this: DrizzleAdapter,
  {
@@ -126,71 +74,23 @@ export const updateOne: UpdateOne = async function updateOne(
    return null
  }

  if (!idToUpdate || shouldUseUpsertRow({ data, fields: collection.flattenedFields })) {
    const result = await upsertRow({
      id: idToUpdate,
      adapter: this,
      data,
      db,
      fields: collection.flattenedFields,
      ignoreResult: returning === false,
      joinQuery,
      operation: 'update',
      req,
      select,
      tableName,
    })

    if (returning === false) {
      return null
    }

    return result
  }

  const { row } = transformForWrite({
  const result = await upsertRow({
    id: idToUpdate,
    adapter: this,
    data,
    db,
    fields: collection.flattenedFields,
    ignoreResult: returning === false,
    joinQuery,
    operation: 'update',
    req,
    select,
    tableName,
  })

  const drizzle = db as LibSQLDatabase
  await drizzle
    .update(this.tables[tableName])
    .set(row)
    // TODO: we can skip fetching idToUpdate here with using the incoming where
    .where(eq(this.tables[tableName].id, idToUpdate))

  if (returning === false) {
    return null
  }

  const findManyArgs = buildFindManyArgs({
    adapter: this,
    depth: 0,
    fields: collection.flattenedFields,
    joinQuery: false,
    select,
    tableName,
  })

  findManyArgs.where = eq(this.tables[tableName].id, idToUpdate)

  const doc = await db.query[tableName].findFirst(findManyArgs)

  // //////////////////////////////////
  // TRANSFORM DATA
  // //////////////////////////////////

  const result = transform({
    adapter: this,
    config: this.payload.config,
    data: doc,
    fields: collection.flattenedFields,
    joinQuery: false,
    tableName,
  })

  return result
}
@@ -1,3 +1,4 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type { TypeWithID } from 'payload'

import { eq } from 'drizzle-orm'
@@ -12,13 +13,14 @@ import { transformForWrite } from '../transform/write/index.js'
import { deleteExistingArrayRows } from './deleteExistingArrayRows.js'
import { deleteExistingRowsByPath } from './deleteExistingRowsByPath.js'
import { insertArrays } from './insertArrays.js'
import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js'

/**
 * If `id` is provided, it will update the row with that ID.
 * If `where` is provided, it will update the row that matches the `where`
 * If neither `id` nor `where` is provided, it will create a new row.
 *
 * This function replaces the entire row and does not support partial updates.
 * adapter function replaces the entire row and does not support partial updates.
 */
export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>({
  id,
@@ -39,428 +41,446 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>({
  upsertTarget,
  where,
}: Args): Promise<T> => {
  // Split out the incoming data into the corresponding:
  // base row, locales, relationships, blocks, and arrays
  const rowToInsert = transformForWrite({
    adapter,
    data,
    fields,
    path,
    tableName,
  })

  // First, we insert the main row
  let insertedRow: Record<string, unknown>

  try {
    if (operation === 'update') {
      const target = upsertTarget || adapter.tables[tableName].id

      if (id) {
        rowToInsert.row.id = id
        ;[insertedRow] = await adapter.insert({
          db,
          onConflictDoUpdate: { set: rowToInsert.row, target },
          tableName,
          values: rowToInsert.row,
        })
      } else {
        ;[insertedRow] = await adapter.insert({
          db,
          onConflictDoUpdate: { set: rowToInsert.row, target, where },
          tableName,
          values: rowToInsert.row,
        })
      }
    } else {
      if (adapter.allowIDOnCreate && data.id) {
        rowToInsert.row.id = data.id
      }
      ;[insertedRow] = await adapter.insert({
        db,
        tableName,
        values: rowToInsert.row,
      })
    }

    const localesToInsert: Record<string, unknown>[] = []
    const relationsToInsert: Record<string, unknown>[] = []
    const textsToInsert: Record<string, unknown>[] = []
    const numbersToInsert: Record<string, unknown>[] = []
    const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
    const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}

    // If there are locale rows with data, add the parent and locale to each
    if (Object.keys(rowToInsert.locales).length > 0) {
      Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => {
        localeRow._parentID = insertedRow.id
        localeRow._locale = locale
        localesToInsert.push(localeRow)
      })
    }

    // If there are relationships, add parent to each
    if (rowToInsert.relationships.length > 0) {
      rowToInsert.relationships.forEach((relation) => {
        relation.parent = insertedRow.id
        relationsToInsert.push(relation)
      })
    }

    // If there are texts, add parent to each
    if (rowToInsert.texts.length > 0) {
      rowToInsert.texts.forEach((textRow) => {
        textRow.parent = insertedRow.id
        textsToInsert.push(textRow)
      })
    }

    // If there are numbers, add parent to each
    if (rowToInsert.numbers.length > 0) {
      rowToInsert.numbers.forEach((numberRow) => {
        numberRow.parent = insertedRow.id
        numbersToInsert.push(numberRow)
      })
    }

    // If there are selects, add parent to each, and then
    // store by table name and rows
    if (Object.keys(rowToInsert.selects).length > 0) {
      Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => {
        selectsToInsert[selectTableName] = []

        selectRows.forEach((row) => {
          if (typeof row.parent === 'undefined') {
            row.parent = insertedRow.id
          }

          selectsToInsert[selectTableName].push(row)
        })
      })
    }

    // If there are blocks, add parent to each, and then
    // store by table name and rows
    Object.keys(rowToInsert.blocks).forEach((tableName) => {
      rowToInsert.blocks[tableName].forEach((blockRow) => {
        blockRow.row._parentID = insertedRow.id
        if (!blocksToInsert[tableName]) {
          blocksToInsert[tableName] = []
        }
        if (blockRow.row.uuid) {
          delete blockRow.row.uuid
        }
        blocksToInsert[tableName].push(blockRow)
      })
  let insertedRow: Record<string, unknown> = { id }
  if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
    const { row } = transformForWrite({
      adapter,
      data,
      enableAtomicWrites: true,
      fields,
      tableName,
    })

    // //////////////////////////////////
    // INSERT LOCALES
    // //////////////////////////////////
    const drizzle = db as LibSQLDatabase

    if (localesToInsert.length > 0) {
      const localeTableName = `${tableName}${adapter.localesSuffix}`
      const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`]
    await drizzle
      .update(adapter.tables[tableName])
      .set(row)
      // TODO: we can skip fetching idToUpdate here with using the incoming where
      .where(eq(adapter.tables[tableName].id, id))
  } else {
    // Split out the incoming data into the corresponding:
    // base row, locales, relationships, blocks, and arrays
    const rowToInsert = transformForWrite({
      adapter,
      data,
      enableAtomicWrites: false,
      fields,
      path,
      tableName,
    })

    // First, we insert the main row
    try {
      if (operation === 'update') {
        await adapter.deleteWhere({
          db,
          tableName: localeTableName,
          where: eq(localeTable._parentID, insertedRow.id),
        })
      }
      const target = upsertTarget || adapter.tables[tableName].id

      await adapter.insert({
        db,
        tableName: localeTableName,
        values: localesToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT RELATIONSHIPS
    // //////////////////////////////////

    const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`

    if (operation === 'update') {
      await deleteExistingRowsByPath({
        adapter,
        db,
        localeColumnName: 'locale',
        parentColumnName: 'parent',
        parentID: insertedRow.id,
        pathColumnName: 'path',
        rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
        tableName: relationshipsTableName,
      })
    }

    if (relationsToInsert.length > 0) {
      await adapter.insert({
        db,
        tableName: relationshipsTableName,
        values: relationsToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT hasMany TEXTS
    // //////////////////////////////////

    const textsTableName = `${tableName}_texts`

    if (operation === 'update') {
      await deleteExistingRowsByPath({
        adapter,
        db,
        localeColumnName: 'locale',
        parentColumnName: 'parent',
        parentID: insertedRow.id,
        pathColumnName: 'path',
        rows: [...textsToInsert, ...rowToInsert.textsToDelete],
        tableName: textsTableName,
      })
    }

    if (textsToInsert.length > 0) {
      await adapter.insert({
        db,
        tableName: textsTableName,
        values: textsToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT hasMany NUMBERS
    // //////////////////////////////////

    const numbersTableName = `${tableName}_numbers`

    if (operation === 'update') {
      await deleteExistingRowsByPath({
        adapter,
        db,
        localeColumnName: 'locale',
        parentColumnName: 'parent',
        parentID: insertedRow.id,
        pathColumnName: 'path',
        rows: [...numbersToInsert, ...rowToInsert.numbersToDelete],
        tableName: numbersTableName,
      })
    }

    if (numbersToInsert.length > 0) {
      await adapter.insert({
        db,
        tableName: numbersTableName,
        values: numbersToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT BLOCKS
    // //////////////////////////////////

    const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}

    if (operation === 'update') {
      for (const tableName of rowToInsert.blocksToDelete) {
        const blockTable = adapter.tables[tableName]
        await adapter.deleteWhere({
      if (id) {
        rowToInsert.row.id = id
        ;[insertedRow] = await adapter.insert({
          db,
          onConflictDoUpdate: { set: rowToInsert.row, target },
          tableName,
          values: rowToInsert.row,
        })
      } else {
        ;[insertedRow] = await adapter.insert({
          db,
          onConflictDoUpdate: { set: rowToInsert.row, target, where },
          tableName,
          values: rowToInsert.row,
        })
      }
    } else {
      if (adapter.allowIDOnCreate && data.id) {
        rowToInsert.row.id = data.id
      }
      ;[insertedRow] = await adapter.insert({
        db,
        tableName,
        where: eq(blockTable._parentID, insertedRow.id),
        values: rowToInsert.row,
      })
    }
  }

  // When versions are enabled, this is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
  const arraysBlocksUUIDMap: Record<string, number | string> = {}
  const localesToInsert: Record<string, unknown>[] = []
  const relationsToInsert: Record<string, unknown>[] = []
  const textsToInsert: Record<string, unknown>[] = []
  const numbersToInsert: Record<string, unknown>[] = []
  const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
  const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}

  for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
    insertedBlockRows[tableName] = await adapter.insert({
      db,
      tableName,
      values: blockRows.map(({ row }) => row),
    })
    // If there are locale rows with data, add the parent and locale to each
    if (Object.keys(rowToInsert.locales).length > 0) {
      Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => {
        localeRow._parentID = insertedRow.id
        localeRow._locale = locale
        localesToInsert.push(localeRow)
      })
    }

    insertedBlockRows[tableName].forEach((row, i) => {
      blockRows[i].row = row
      if (
        typeof row._uuid === 'string' &&
        (typeof row.id === 'string' || typeof row.id === 'number')
      ) {
        arraysBlocksUUIDMap[row._uuid] = row.id
      }
    })
    // If there are relationships, add parent to each
    if (rowToInsert.relationships.length > 0) {
      rowToInsert.relationships.forEach((relation) => {
        relation.parent = insertedRow.id
        relationsToInsert.push(relation)
      })
    }

    const blockLocaleIndexMap: number[] = []
    // If there are texts, add parent to each
    if (rowToInsert.texts.length > 0) {
      rowToInsert.texts.forEach((textRow) => {
        textRow.parent = insertedRow.id
        textsToInsert.push(textRow)
      })
    }

    const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
      if (Object.entries(blockRow.locales).length > 0) {
        Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
          if (Object.keys(blockLocaleData).length > 0) {
            blockLocaleData._parentID = blockRow.row.id
            blockLocaleData._locale = blockLocale
            acc.push(blockLocaleData)
            blockLocaleIndexMap.push(i)
    // If there are numbers, add parent to each
    if (rowToInsert.numbers.length > 0) {
      rowToInsert.numbers.forEach((numberRow) => {
        numberRow.parent = insertedRow.id
        numbersToInsert.push(numberRow)
      })
    }

    // If there are selects, add parent to each, and then
    // store by table name and rows
    if (Object.keys(rowToInsert.selects).length > 0) {
      Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => {
        selectsToInsert[selectTableName] = []

        selectRows.forEach((row) => {
          if (typeof row.parent === 'undefined') {
            row.parent = insertedRow.id
          }

          selectsToInsert[selectTableName].push(row)
        })
      })
    }

    // If there are blocks, add parent to each, and then
    // store by table name and rows
    Object.keys(rowToInsert.blocks).forEach((tableName) => {
      rowToInsert.blocks[tableName].forEach((blockRow) => {
        blockRow.row._parentID = insertedRow.id
        if (!blocksToInsert[tableName]) {
          blocksToInsert[tableName] = []
        }
        if (blockRow.row.uuid) {
          delete blockRow.row.uuid
        }
        blocksToInsert[tableName].push(blockRow)
      })
    })

    // //////////////////////////////////
    // INSERT LOCALES
    // //////////////////////////////////

    if (localesToInsert.length > 0) {
      const localeTableName = `${tableName}${adapter.localesSuffix}`
      const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`]

      if (operation === 'update') {
        await adapter.deleteWhere({
          db,
          tableName: localeTableName,
          where: eq(localeTable._parentID, insertedRow.id),
        })
      }

      return acc
    }, [])

    if (blockLocaleRowsToInsert.length > 0) {
      await adapter.insert({
        db,
        tableName: `${tableName}${adapter.localesSuffix}`,
        values: blockLocaleRowsToInsert,
        tableName: localeTableName,
        values: localesToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT RELATIONSHIPS
    // //////////////////////////////////

    const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`

    if (operation === 'update') {
      await deleteExistingRowsByPath({
        adapter,
        db,
        localeColumnName: 'locale',
        parentColumnName: 'parent',
        parentID: insertedRow.id,
        pathColumnName: 'path',
        rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
        tableName: relationshipsTableName,
      })
    }

    if (relationsToInsert.length > 0) {
      await adapter.insert({
        db,
        tableName: relationshipsTableName,
        values: relationsToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT hasMany TEXTS
    // //////////////////////////////////

    const textsTableName = `${tableName}_texts`

    if (operation === 'update') {
      await deleteExistingRowsByPath({
        adapter,
        db,
        localeColumnName: 'locale',
        parentColumnName: 'parent',
        parentID: insertedRow.id,
        pathColumnName: 'path',
        rows: [...textsToInsert, ...rowToInsert.textsToDelete],
        tableName: textsTableName,
      })
    }

    if (textsToInsert.length > 0) {
      await adapter.insert({
        db,
        tableName: textsTableName,
        values: textsToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT hasMany NUMBERS
    // //////////////////////////////////

    const numbersTableName = `${tableName}_numbers`

    if (operation === 'update') {
      await deleteExistingRowsByPath({
        adapter,
        db,
        localeColumnName: 'locale',
        parentColumnName: 'parent',
        parentID: insertedRow.id,
        pathColumnName: 'path',
        rows: [...numbersToInsert, ...rowToInsert.numbersToDelete],
        tableName: numbersTableName,
      })
    }

    if (numbersToInsert.length > 0) {
      await adapter.insert({
        db,
        tableName: numbersTableName,
        values: numbersToInsert,
      })
    }

    // //////////////////////////////////
    // INSERT BLOCKS
    // //////////////////////////////////

    const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}

    if (operation === 'update') {
      for (const tableName of rowToInsert.blocksToDelete) {
        const blockTable = adapter.tables[tableName]
        await adapter.deleteWhere({
          db,
          tableName,
          where: eq(blockTable._parentID, insertedRow.id),
        })
      }
    }

    // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
    const arraysBlocksUUIDMap: Record<string, number | string> = {}

    for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
      insertedBlockRows[tableName] = await adapter.insert({
        db,
        tableName,
        values: blockRows.map(({ row }) => row),
      })

      insertedBlockRows[tableName].forEach((row, i) => {
        blockRows[i].row = row
        if (
          typeof row._uuid === 'string' &&
          (typeof row.id === 'string' || typeof row.id === 'number')
        ) {
          arraysBlocksUUIDMap[row._uuid] = row.id
        }
      })

      const blockLocaleIndexMap: number[] = []

      const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
        if (Object.entries(blockRow.locales).length > 0) {
          Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
            if (Object.keys(blockLocaleData).length > 0) {
              blockLocaleData._parentID = blockRow.row.id
              blockLocaleData._locale = blockLocale
              acc.push(blockLocaleData)
              blockLocaleIndexMap.push(i)
            }
          })
        }

        return acc
      }, [])

      if (blockLocaleRowsToInsert.length > 0) {
        await adapter.insert({
          db,
          tableName: `${tableName}${adapter.localesSuffix}`,
          values: blockLocaleRowsToInsert,
        })
      }

      await insertArrays({
        adapter,
        arrays: blockRows.map(({ arrays }) => arrays),
        db,
        parentRows: insertedBlockRows[tableName],
        uuidMap: arraysBlocksUUIDMap,
      })
    }

    // //////////////////////////////////
    // INSERT ARRAYS RECURSIVELY
    // //////////////////////////////////

    if (operation === 'update') {
      for (const arrayTableName of Object.keys(rowToInsert.arrays)) {
        await deleteExistingArrayRows({
          adapter,
          db,
          parentID: insertedRow.id,
          tableName: arrayTableName,
        })
      }
    }

    await insertArrays({
      adapter,
      arrays: blockRows.map(({ arrays }) => arrays),
      arrays: [rowToInsert.arrays],
      db,
      parentRows: insertedBlockRows[tableName],
      parentRows: [insertedRow],
      uuidMap: arraysBlocksUUIDMap,
    })
  }

  // //////////////////////////////////
  // INSERT ARRAYS RECURSIVELY
  // //////////////////////////////////
  // //////////////////////////////////
  // INSERT hasMany SELECTS
  // //////////////////////////////////

  if (operation === 'update') {
    for (const arrayTableName of Object.keys(rowToInsert.arrays)) {
      await deleteExistingArrayRows({
        adapter,
        db,
        parentID: insertedRow.id,
        tableName: arrayTableName,
      })
    }
  }
  for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) {
    const selectTable = adapter.tables[selectTableName]
    if (operation === 'update') {
      await adapter.deleteWhere({
        db,
        tableName: selectTableName,
        where: eq(selectTable.parent, insertedRow.id),
      })
    }

    await insertArrays({
      adapter,
      arrays: [rowToInsert.arrays],
      db,
      parentRows: [insertedRow],
      uuidMap: arraysBlocksUUIDMap,
    })
    if (Object.keys(arraysBlocksUUIDMap).length > 0) {
      tableRows.forEach((row: any) => {
        if (row.parent in arraysBlocksUUIDMap) {
          row.parent = arraysBlocksUUIDMap[row.parent]
        }
      })
    }

  // //////////////////////////////////
  // INSERT hasMany SELECTS
  // //////////////////////////////////

  for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) {
    const selectTable = adapter.tables[selectTableName]
    if (operation === 'update') {
      await adapter.deleteWhere({
        db,
        tableName: selectTableName,
        where: eq(selectTable.parent, insertedRow.id),
      })
    if (tableRows.length) {
      await adapter.insert({
        db,
        tableName: selectTableName,
        values: tableRows,
      })
    }
  }

  if (Object.keys(arraysBlocksUUIDMap).length > 0) {
    tableRows.forEach((row: any) => {
      if (row.parent in arraysBlocksUUIDMap) {
        row.parent = arraysBlocksUUIDMap[row.parent]
  // //////////////////////////////////
  // Error Handling
  // //////////////////////////////////
  } catch (caughtError) {
    // Unique constraint violation error
    // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite

    let error = caughtError
    if (typeof caughtError === 'object' && 'cause' in caughtError) {
      error = caughtError.cause
    }

    if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
      let fieldName: null | string = null
      // We need to try and find the right constraint for the field but if we can't we fallback to a generic message
      if (error.code === '23505') {
        // For PostgreSQL, we can try to extract the field name from the error constraint
        if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) {
          fieldName = adapter.fieldConstraints[tableName]?.[error.constraint]
        } else {
          const replacement = `${tableName}_`

          if (error.constraint.includes(replacement)) {
            const replacedConstraint = error.constraint.replace(replacement, '')

            if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) {
              fieldName = adapter.fieldConstraints[tableName][replacedConstraint]
            }
          }
        }
      })
    }

    if (tableRows.length) {
      await adapter.insert({
        db,
        tableName: selectTableName,
        values: tableRows,
      })
    }
  }
        if (!fieldName) {
          // Last case scenario we extract the key and value from the detail on the error
          const detail = error.detail
          const regex = /Key \(([^)]+)\)=\(([^)]+)\)/
          const match: string[] = detail.match(regex)

  // //////////////////////////////////
  // Error Handling
  // //////////////////////////////////
  } catch (caughtError) {
    // Unique constraint violation error
    // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite
          if (match && match[1]) {
            const key = match[1]

    let error = caughtError
    if (typeof caughtError === 'object' && 'cause' in caughtError) {
      error = caughtError.cause
    }
            fieldName = key
          }
        }
      } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
        /**
         * For SQLite, we can try to extract the field name from the error message
         * The message typically looks like:
         * "UNIQUE constraint failed: table_name.field_name"
         */
        const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/
        const match: string[] = error.message.match(regex)

    if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
      let fieldName: null | string = null
      // We need to try and find the right constraint for the field but if we can't we fallback to a generic message
      if (error.code === '23505') {
        // For PostgreSQL, we can try to extract the field name from the error constraint
        if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) {
          fieldName = adapter.fieldConstraints[tableName]?.[error.constraint]
        } else {
          const replacement = `${tableName}_`
        if (match && match[2]) {
          if (adapter.fieldConstraints[tableName]) {
            fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`]
          }

          if (error.constraint.includes(replacement)) {
            const replacedConstraint = error.constraint.replace(replacement, '')

            if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) {
              fieldName = adapter.fieldConstraints[tableName][replacedConstraint]
          if (!fieldName) {
            fieldName = match[2]
          }
        }
      }

      if (!fieldName) {
        // Last case scenario we extract the key and value from the detail on the error
        const detail = error.detail
        const regex = /Key \(([^)]+)\)=\(([^)]+)\)/
        const match: string[] = detail.match(regex)

        if (match && match[1]) {
          const key = match[1]

          fieldName = key
        }
      }
    } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
      /**
       * For SQLite, we can try to extract the field name from the error message
       * The message typically looks like:
       * "UNIQUE constraint failed: table_name.field_name"
       */
      const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/
      const match: string[] = error.message.match(regex)

      if (match && match[2]) {
        if (adapter.fieldConstraints[tableName]) {
          fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`]
        }

        if (!fieldName) {
          fieldName = match[2]
        }
      }
      throw new ValidationError(
        {
          id,
          errors: [
            {
              message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
              path: fieldName,
            },
          ],
          req,
        },
        req?.t,
      )
    } else {
      throw error
    }

      throw new ValidationError(
        {
          id,
          errors: [
            {
              message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
              path: fieldName,
            },
          ],
          req,
        },
        req?.t,
      )
    } else {
      throw error
    }
  }
@@ -0,0 +1,52 @@
import type { FlattenedField } from 'payload'

/**
 * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call.
 * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships.
 */
export const shouldUseOptimizedUpsertRow = ({
  data,
  fields,
}: {
  data: Record<string, unknown>
  fields: FlattenedField[]
}) => {
  for (const key in data) {
    const value = data[key]
    const field = fields.find((each) => each.name === key)

    if (!field) {
      continue
    }

    if (
      field.type === 'array' ||
      field.type === 'blocks' ||
      ((field.type === 'text' ||
        field.type === 'relationship' ||
        field.type === 'upload' ||
        field.type === 'select' ||
        field.type === 'number') &&
        field.hasMany) ||
      ((field.type === 'relationship' || field.type === 'upload') &&
        Array.isArray(field.relationTo)) ||
      field.localized
    ) {
      return false
    }

    if (
      (field.type === 'group' || field.type === 'tab') &&
      value &&
      typeof value === 'object' &&
      !shouldUseOptimizedUpsertRow({
        data: value as Record<string, unknown>,
        fields: field.flattenedFields,
      })
    ) {
      return false
    }
  }

  return true
}
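A quick illustration of the predicate's behavior — the field definitions below are hypothetical and pared down; real FlattenedField objects carry more properties:

import type { FlattenedField } from 'payload'

import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js'

// Invented, minimal field shapes for illustration.
const fields = [
  { name: 'views', type: 'number' },
  { name: 'tags', type: 'text', hasMany: true },
] as unknown as FlattenedField[]

// true: a plain column on the main table — safe for a single UPDATE ... SET
shouldUseOptimizedUpsertRow({ data: { views: 5 }, fields })

// false: hasMany text lives in a separate relational table, so the full
// upsertRow path (delete and re-insert child rows) is required
shouldUseOptimizedUpsertRow({ data: { tags: ['a', 'b'] }, fields })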
@@ -1,5 +1,7 @@
const RawConstraintSymbol = Symbol('RawConstraint')

export const DistinctSymbol = Symbol('DistinctSymbol')

/**
 * You can use this to inject a raw query to where
 */
@@ -82,6 +82,7 @@ export type HookOperationType =
  | 'forgotPassword'
  | 'login'
  | 'read'
  | 'readDistinct'
  | 'refresh'
  | 'resetPassword'
  | 'update'
46 packages/payload/src/collections/endpoints/findDistinct.ts Normal file
@@ -0,0 +1,46 @@
import { status as httpStatus } from 'http-status'

import type { PayloadHandler } from '../../config/types.js'
import type { Where } from '../../types/index.js'

import { APIError } from '../../errors/APIError.js'
import { getRequestCollection } from '../../utilities/getRequestEntity.js'
import { headersWithCors } from '../../utilities/headersWithCors.js'
import { isNumber } from '../../utilities/isNumber.js'
import { findDistinctOperation } from '../operations/findDistinct.js'

export const findDistinctHandler: PayloadHandler = async (req) => {
  const collection = getRequestCollection(req)
  const { depth, field, limit, page, sort, where } = req.query as {
    depth?: string
    field?: string
    limit?: string
    page?: string
    sort?: string
    sortOrder?: string
    where?: Where
  }

  if (!field) {
    throw new APIError('field must be specified', httpStatus.BAD_REQUEST)
  }

  const result = await findDistinctOperation({
    collection,
    depth: isNumber(depth) ? Number(depth) : undefined,
    field,
    limit: isNumber(limit) ? Number(limit) : undefined,
    page: isNumber(page) ? Number(page) : undefined,
    req,
    sort: typeof sort === 'string' ? sort.split(',') : undefined,
    where,
  })

  return Response.json(result, {
    headers: headersWithCors({
      headers: new Headers(),
      req,
    }),
    status: httpStatus.OK,
  })
}
@@ -9,6 +9,7 @@ import { docAccessHandler } from './docAccess.js'
import { duplicateHandler } from './duplicate.js'
import { findHandler } from './find.js'
import { findByIDHandler } from './findByID.js'
import { findDistinctHandler } from './findDistinct.js'
import { findVersionByIDHandler } from './findVersionByID.js'
import { findVersionsHandler } from './findVersions.js'
import { previewHandler } from './preview.js'
@@ -48,6 +49,12 @@ export const defaultCollectionEndpoints: Endpoint[] = [
    method: 'get',
    path: '/versions',
  },
  // Might be uncommented in the future
  // {
  //   handler: findDistinctHandler,
  //   method: 'get',
  //   path: '/distinct',
  // },
  {
    handler: duplicateHandler,
    method: 'post',
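Note the REST route itself ships commented out. If the `/distinct` route were enabled, a request would mirror the query params the handler above parses — this is a purely hypothetical sketch against a collection named 'posts', not a live endpoint:

// Hypothetical request shape for the (currently disabled) /distinct route.
const res = await fetch(
  '/api/posts/distinct?' +
    new URLSearchParams({ field: 'category', limit: '10', page: '1', sort: 'category' }),
)
const { values, totalDocs } = await res.json()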
189 packages/payload/src/collections/operations/findDistinct.ts Normal file
@@ -0,0 +1,189 @@
import httpStatus from 'http-status'

import type { AccessResult } from '../../config/types.js'
import type { PaginatedDistinctDocs } from '../../database/types.js'
import type { PayloadRequest, PopulateType, Sort, Where } from '../../types/index.js'
import type { Collection } from '../config/types.js'

import { executeAccess } from '../../auth/executeAccess.js'
import { combineQueries } from '../../database/combineQueries.js'
import { validateQueryPaths } from '../../database/queryValidation/validateQueryPaths.js'
import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js'
import { APIError } from '../../errors/APIError.js'
import { Forbidden } from '../../errors/Forbidden.js'
import { relationshipPopulationPromise } from '../../fields/hooks/afterRead/relationshipPopulationPromise.js'
import { getFieldByPath } from '../../utilities/getFieldByPath.js'
import { killTransaction } from '../../utilities/killTransaction.js'
import { buildAfterOperation } from './utils.js'

export type Arguments = {
  collection: Collection
  depth?: number
  disableErrors?: boolean
  field: string
  limit?: number
  locale?: string
  overrideAccess?: boolean
  page?: number
  populate?: PopulateType
  req?: PayloadRequest
  showHiddenFields?: boolean
  sort?: Sort
  where?: Where
}

export const findDistinctOperation = async (
  incomingArgs: Arguments,
): Promise<PaginatedDistinctDocs<Record<string, unknown>>> => {
  let args = incomingArgs

  try {
    // /////////////////////////////////////
    // beforeOperation - Collection
    // /////////////////////////////////////

    if (args.collection.config.hooks?.beforeOperation?.length) {
      for (const hook of args.collection.config.hooks.beforeOperation) {
        args =
          (await hook({
            args,
            collection: args.collection.config,
            context: args.req!.context,
            operation: 'readDistinct',
            req: args.req!,
          })) || args
      }
    }

    const {
      collection: { config: collectionConfig },
      disableErrors,
      overrideAccess,
      populate,
      showHiddenFields = false,
      where,
    } = args

    const req = args.req!
    const { locale, payload } = req

    // /////////////////////////////////////
    // Access
    // /////////////////////////////////////

    let accessResult: AccessResult

    if (!overrideAccess) {
      accessResult = await executeAccess({ disableErrors, req }, collectionConfig.access.read)

      // If errors are disabled, and access returns false, return empty results
      if (accessResult === false) {
        return {
          hasNextPage: false,
          hasPrevPage: false,
          limit: args.limit || 0,
          nextPage: null,
          page: 1,
          pagingCounter: 1,
          prevPage: null,
          totalDocs: 0,
          totalPages: 0,
          values: [],
        }
      }
    }

    // /////////////////////////////////////
    // Find Distinct
    // /////////////////////////////////////

    const fullWhere = combineQueries(where!, accessResult!)
    sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere })

    await validateQueryPaths({
      collectionConfig,
      overrideAccess: overrideAccess!,
      req,
      where: where ?? {},
    })

    const fieldResult = getFieldByPath({
      fields: collectionConfig.flattenedFields,
      path: args.field,
    })

    if (!fieldResult) {
      throw new APIError(
        `Field ${args.field} was not found in the collection ${collectionConfig.slug}`,
        httpStatus.BAD_REQUEST,
      )
    }

    if (fieldResult.field.hidden && !showHiddenFields) {
      throw new Forbidden(req.t)
    }

    if (fieldResult.field.access?.read) {
      const hasAccess = await fieldResult.field.access.read({ req })
      if (!hasAccess) {
        throw new Forbidden(req.t)
      }
    }

    let result = await payload.db.findDistinct({
      collection: collectionConfig.slug,
      field: args.field,
      limit: args.limit,
      locale: locale!,
      page: args.page,
      req,
      sort: args.sort,
      where: fullWhere,
    })

    if (
      (fieldResult.field.type === 'relationship' || fieldResult.field.type === 'upload') &&
      args.depth
    ) {
      const populationPromises: Promise<void>[] = []
      for (const doc of result.values) {
        populationPromises.push(
          relationshipPopulationPromise({
            currentDepth: 0,
            depth: args.depth,
            draft: false,
            fallbackLocale: req.fallbackLocale || null,
            field: fieldResult.field,
            locale: req.locale || null,
            overrideAccess: args.overrideAccess ?? true,
            parentIsLocalized: false,
            populate,
            req,
            showHiddenFields: false,
            siblingDoc: doc,
          }),
        )
      }
      await Promise.all(populationPromises)
    }

    // /////////////////////////////////////
    // afterOperation - Collection
    // /////////////////////////////////////

    result = await buildAfterOperation({
      args,
      collection: collectionConfig,
      operation: 'findDistinct',
      result,
    })

    // /////////////////////////////////////
    // Return results
    // /////////////////////////////////////

    return result
  } catch (error: unknown) {
    await killTransaction(args.req!)
    throw error
  }
}
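The paginated shape returned above mirrors Payload's find pagination, with `values` holding one object per distinct value. A sketch of a possible result for a `category` field — the values are invented:

// Invented example, showing the PaginatedDistinctDocs shape returned above.
const example = {
  hasNextPage: false,
  hasPrevPage: false,
  limit: 10,
  nextPage: null,
  page: 1,
  pagingCounter: 1,
  prevPage: null,
  totalDocs: 2,
  totalPages: 1,
  values: [{ category: 'news' }, { category: 'sports' }],
}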
@@ -0,0 +1,138 @@
|
||||
import type {
|
||||
CollectionSlug,
|
||||
DataFromCollectionSlug,
|
||||
Document,
|
||||
PaginatedDistinctDocs,
|
||||
Payload,
|
||||
PayloadRequest,
|
||||
PopulateType,
|
||||
RequestContext,
|
||||
Sort,
|
||||
TypedLocale,
|
||||
Where,
|
||||
} from '../../../index.js'
|
||||
import type { CreateLocalReqOptions } from '../../../utilities/createLocalReq.js'
|
||||
|
||||
import { APIError, createLocalReq } from '../../../index.js'
|
||||
import { findDistinctOperation } from '../findDistinct.js'
|
||||
|
||||
export type Options<
  TSlug extends CollectionSlug,
  TField extends keyof DataFromCollectionSlug<TSlug>,
> = {
  /**
   * The collection slug to operate against.
   */
  collection: TSlug
  /**
   * [Context](https://payloadcms.com/docs/hooks/context), which will be passed to `context` and `req.context`,
   * where it can be read by hooks. Useful if you want to pass additional information to the hooks which
   * shouldn't necessarily be part of the document, for example a `triggerBeforeChange` option which can be read by the BeforeChange hook
   * to determine if it should run or not.
   */
  context?: RequestContext
  /**
   * [Control auto-population](https://payloadcms.com/docs/queries/depth) of nested relationship and upload fields.
   */
  depth?: number
  /**
   * When set to `true`, errors will not be thrown.
   */
  disableErrors?: boolean
  /**
   * The field to get distinct values for.
   */
  field: TField
  /**
   * The maximum number of distinct field values to be returned.
   * By default the operation returns all values.
   */
  limit?: number
  /**
   * Specify [locale](https://payloadcms.com/docs/configuration/localization) for any returned documents.
   */
  locale?: 'all' | TypedLocale
  /**
   * Skip access control.
   * Set to `false` if you want to respect Access Control for the operation, for example when fetching data for the front-end.
   * @default true
   */
  overrideAccess?: boolean
  /**
   * Get a specific page number (if limit is specified).
   * @default 1
   */
  page?: number
  /**
   * Specify [populate](https://payloadcms.com/docs/queries/select#populate) to control which fields to include in the result from populated documents.
   */
  populate?: PopulateType
  /**
   * The `PayloadRequest` object. You can pass it to thread the current [transaction](https://payloadcms.com/docs/database/transactions), user and locale to the operation.
   * Recommended to pass when using the Local API from hooks, as you usually want to execute the operation within the current transaction.
   */
  req?: Partial<PayloadRequest>
  /**
   * Opt-in to receiving hidden fields. By default, they are hidden from returned documents in accordance with your config.
   * @default false
   */
  showHiddenFields?: boolean
  /**
   * Sort the documents, can be a string or an array of strings.
   * @example '-createdAt' // sort DESC by createdAt
   * @example ['group', '-createdAt'] // sort by 2 fields, ASC group and DESC createdAt
   */
  sort?: Sort
  /**
   * If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks.
   */
  user?: Document
  /**
   * A filter [query](https://payloadcms.com/docs/queries/overview).
   */
  where?: Where
}

export async function findDistinct<
  TSlug extends CollectionSlug,
  TField extends keyof DataFromCollectionSlug<TSlug> & string,
>(
  payload: Payload,
  options: Options<TSlug, TField>,
): Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>> {
  const {
    collection: collectionSlug,
    depth = 0,
    disableErrors,
    field,
    limit,
    overrideAccess = true,
    page,
    populate,
    showHiddenFields,
    sort,
    where,
  } = options
  const collection = payload.collections[collectionSlug]

  if (!collection) {
    throw new APIError(
      `The collection with slug ${String(collectionSlug)} can't be found. Find Distinct Operation.`,
    )
  }

  return findDistinctOperation({
    collection,
    depth,
    disableErrors,
    field,
    limit,
    overrideAccess,
    page,
    populate,
    req: await createLocalReq(options as CreateLocalReqOptions, payload),
    showHiddenFields,
    sort,
    where,
  }) as Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>>
}
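
// --- Editorial usage sketch (not part of this diff) --------------------------
// A minimal example of the new Local API method. Assumes a `posts` collection
// with a `title` text field and the Next.js `@payload-config` alias:
//
//   import { getPayload } from 'payload'
//   import config from '@payload-config'
//
//   const payload = await getPayload({ config })
//   const { values, totalDocs } = await payload.findDistinct({
//     collection: 'posts',
//     field: 'title',
//     limit: 10,
//     sort: '-title',
//   })
//   // values -> [{ title: '...' }, ...]; totalDocs counts all distinct values
// ------------------------------------------------------------------------------
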
@@ -12,6 +12,7 @@ import type { deleteOperation } from './delete.js'
import type { deleteByIDOperation } from './deleteByID.js'
import type { findOperation } from './find.js'
import type { findByIDOperation } from './findByID.js'
import type { findDistinctOperation } from './findDistinct.js'
import type { updateOperation } from './update.js'
import type { updateByIDOperation } from './updateByID.js'

@@ -30,6 +31,7 @@ export type AfterOperationMap<TOperationGeneric extends CollectionSlug> = {
    boolean,
    SelectFromCollectionSlug<TOperationGeneric>
  >
  findDistinct: typeof findDistinctOperation
  forgotPassword: typeof forgotPasswordOperation
  login: typeof loginOperation<TOperationGeneric>
  refresh: typeof refreshOperation
@@ -81,6 +83,11 @@ export type AfterOperationArg<TOperationGeneric extends CollectionSlug> = {
      operation: 'findByID'
      result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['findByID']>>
    }
  | {
      args: Parameters<AfterOperationMap<TOperationGeneric>['findDistinct']>[0]
      operation: 'findDistinct'
      result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['findDistinct']>>
    }
  | {
      args: Parameters<AfterOperationMap<TOperationGeneric>['forgotPassword']>[0]
      operation: 'forgotPassword'

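// --- Editorial sketch (not part of this diff) ---------------------------------
// With the new union member, a collection `afterOperation` hook can narrow on
// the operation name. Hypothetical logging hook (the `CollectionAfterOperationHook`
// type is assumed from payload's exports):
//
//   import type { CollectionAfterOperationHook } from 'payload'
//
//   export const logDistinct: CollectionAfterOperationHook = ({ operation, result }) => {
//     if (operation === 'findDistinct') {
//       console.log(`findDistinct returned ${result.values.length} values`)
//     }
//     return result
//   }
// -------------------------------------------------------------------------------
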
@@ -63,6 +63,8 @@ export interface BaseDatabaseAdapter {

  find: Find

  findDistinct: FindDistinct

  findGlobal: FindGlobal

  findGlobalVersions: FindGlobalVersions
@@ -82,16 +84,15 @@ export interface BaseDatabaseAdapter {
   * Run any migration up functions that have not yet been performed and update the status
   */
  migrate: (args?: { migrations?: Migration[] }) => Promise<void>

  /**
   * Run any migration down functions that have been performed
   */
  migrateDown: () => Promise<void>

  /**
   * Drop the current database and run all migrate up functions
   */
  migrateFresh: (args: { forceAcceptWarning?: boolean }) => Promise<void>

  /**
   * Run all migration down functions before running up
   */
@@ -104,6 +105,7 @@ export interface BaseDatabaseAdapter {
   * Read the current state of migrations and output the result to show which have been run
   */
  migrateStatus: () => Promise<void>

  /**
   * Path to read and write migration files from
   */
@@ -113,7 +115,6 @@ export interface BaseDatabaseAdapter {
   * The name of the database adapter
   */
  name: string

  /**
   * Full package name of the database adapter
   *
@@ -124,6 +125,7 @@ export interface BaseDatabaseAdapter {
   * reference to the instance of payload
   */
  payload: Payload

  queryDrafts: QueryDrafts

  /**
@@ -151,7 +153,6 @@ export interface BaseDatabaseAdapter {
  updateMany: UpdateMany

  updateOne: UpdateOne

  updateVersion: UpdateVersion
  upsert: Upsert
}
@@ -481,6 +482,34 @@ export type CreateArgs = {
  select?: SelectType
}

export type FindDistinctArgs = {
  collection: CollectionSlug
  field: string
  limit?: number
  locale?: string
  page?: number
  req?: Partial<PayloadRequest>
  sort?: Sort
  where?: Where
}

export type PaginatedDistinctDocs<T extends Record<string, unknown>> = {
  hasNextPage: boolean
  hasPrevPage: boolean
  limit: number
  nextPage?: null | number | undefined
  page: number
  pagingCounter: number
  prevPage?: null | number | undefined
  totalDocs: number
  totalPages: number
  values: T[]
}

export type FindDistinct = (
  args: FindDistinctArgs,
) => Promise<PaginatedDistinctDocs<Record<string, any>>>
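
// --- Editorial note (not part of this diff) -----------------------------------
// The shape mirrors PaginatedDocs, except `values` holds one object per distinct
// field value rather than full documents. A hypothetical result for
// findDistinct({ collection: 'posts', field: 'title', limit: 2, page: 1 }):
//
//   const sample: PaginatedDistinctDocs<{ title: string }> = {
//     hasNextPage: true,
//     hasPrevPage: false,
//     limit: 2,
//     nextPage: 2,
//     page: 1,
//     pagingCounter: 1,
//     prevPage: null,
//     totalDocs: 9,
//     totalPages: 5,
//     values: [{ title: 'title-1' }, { title: 'title-2' }],
//   }
// -------------------------------------------------------------------------------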

export type Create = (args: CreateArgs) => Promise<Document>

export type UpdateOneArgs = {

@@ -40,7 +40,7 @@ import {
} from './auth/operations/local/verifyEmail.js'
export type { FieldState } from './admin/forms/Form.js'
import type { InitOptions, SanitizedConfig } from './config/types.js'
-import type { BaseDatabaseAdapter, PaginatedDocs } from './database/types.js'
+import type { BaseDatabaseAdapter, PaginatedDistinctDocs, PaginatedDocs } from './database/types.js'
import type { InitializedEmailAdapter } from './email/types.js'
import type { DataFromGlobalSlug, Globals, SelectFromGlobalSlug } from './globals/config/types.js'
import type {
@@ -72,6 +72,10 @@ import {
  findByIDLocal,
  type Options as FindByIDOptions,
} from './collections/operations/local/findByID.js'
import {
  findDistinct as findDistinctLocal,
  type Options as FindDistinctOptions,
} from './collections/operations/local/findDistinct.js'
import {
  findVersionByIDLocal,
  type Options as FindVersionByIDOptions,
@@ -464,6 +468,20 @@ export class BasePayload {
    return findByIDLocal<TSlug, TDisableErrors, TSelect>(this, options)
  }

  /**
   * @description Find distinct field values
   * @param options
   * @returns result with distinct field values
   */
  findDistinct = async <
    TSlug extends CollectionSlug,
    TField extends keyof DataFromCollectionSlug<TSlug> & string,
  >(
    options: FindDistinctOptions<TSlug, TField>,
  ): Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>> => {
    return findDistinctLocal(this, options)
  }
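
  // --- Editorial note (not part of this diff) --------------------------------
  // The TField generic ties the result's value type to the field name you pass.
  // A hypothetical call against a generated `posts` type:
  //
  //   const res = await payload.findDistinct({ collection: 'posts', field: 'title' })
  //   // res.values is typed as Array<{ title: Post['title'] }>
  // ----------------------------------------------------------------------------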

  findGlobal = async <TSlug extends GlobalSlug, TSelect extends SelectFromGlobalSlug<TSlug>>(
    options: FindGlobalOptions<TSlug, TSelect>,
  ): Promise<TransformGlobalWithSelect<TSlug, TSelect>> => {
@@ -1174,7 +1192,6 @@ export { updateOperation } from './collections/operations/update.js'
export { updateByIDOperation } from './collections/operations/updateByID.js'

export { buildConfig } from './config/build.js'

export {
  type ClientConfig,
  createClientConfig,
@@ -1183,6 +1200,7 @@ export {
  type UnsanitizedClientConfig,
} from './config/client.js'
export { defaults } from './config/defaults.js'

export { type OrderableEndpointBody } from './config/orderable/index.js'
export { sanitizeConfig } from './config/sanitize.js'
export type * from './config/types.js'
@@ -1237,6 +1255,7 @@ export type {
  Destroy,
  Find,
  FindArgs,
  FindDistinct,
  FindGlobal,
  FindGlobalArgs,
  FindGlobalVersions,
@@ -1250,6 +1269,7 @@ export type {
  Migration,
  MigrationData,
  MigrationTemplateArgs,
  PaginatedDistinctDocs,
  PaginatedDocs,
  QueryDrafts,
  QueryDraftsArgs,

@@ -5,6 +5,7 @@ import {
  fieldAffectsData,
  fieldHasSubFields,
  fieldShouldBeLocalized,
  tabHasName,
} from '../fields/config/types.js'

const traverseArrayOrBlocksField = ({
@@ -16,6 +17,7 @@ const traverseArrayOrBlocksField = ({
  fillEmpty,
  leavesFirst,
  parentIsLocalized,
  parentPath,
  parentRef,
}: {
  callback: TraverseFieldsCallback
@@ -26,6 +28,7 @@ const traverseArrayOrBlocksField = ({
  fillEmpty: boolean
  leavesFirst: boolean
  parentIsLocalized: boolean
  parentPath?: string
  parentRef?: unknown
}) => {
  if (fillEmpty) {
@@ -38,6 +41,7 @@ const traverseArrayOrBlocksField = ({
      isTopLevel: false,
      leavesFirst,
      parentIsLocalized: parentIsLocalized || field.localized,
      parentPath: `${parentPath}${field.name}.`,
      parentRef,
    })
  }
@@ -55,6 +59,7 @@ const traverseArrayOrBlocksField = ({
      isTopLevel: false,
      leavesFirst,
      parentIsLocalized: parentIsLocalized || field.localized,
      parentPath: `${parentPath}${field.name}.`,
      parentRef,
    })
  }
@@ -88,6 +93,7 @@ const traverseArrayOrBlocksField = ({
      isTopLevel: false,
      leavesFirst,
      parentIsLocalized: parentIsLocalized || field.localized,
      parentPath: `${parentPath}${field.name}.`,
      parentRef,
      ref,
    })
@@ -105,6 +111,7 @@ export type TraverseFieldsCallback = (args: {
   */
  next?: () => void
  parentIsLocalized: boolean
  parentPath: string
  /**
   * The parent reference object
   */
@@ -130,6 +137,7 @@ type TraverseFieldsArgs = {
   */
  leavesFirst?: boolean
  parentIsLocalized?: boolean
  parentPath?: string
  parentRef?: Record<string, unknown> | unknown
  ref?: Record<string, unknown> | unknown
}
@@ -152,6 +160,7 @@ export const traverseFields = ({
  isTopLevel = true,
  leavesFirst = false,
  parentIsLocalized,
  parentPath = '',
  parentRef = {},
  ref = {},
}: TraverseFieldsArgs): void => {
@@ -172,12 +181,19 @@ export const traverseFields = ({
    if (
      !leavesFirst &&
      callback &&
-      callback({ field, next, parentIsLocalized: parentIsLocalized!, parentRef, ref })
+      callback({ field, next, parentIsLocalized: parentIsLocalized!, parentPath, parentRef, ref })
    ) {
      return true
    } else if (leavesFirst) {
      callbackStack.push(() =>
-        callback({ field, next, parentIsLocalized: parentIsLocalized!, parentRef, ref }),
+        callback({
+          field,
+          next,
+          parentIsLocalized: parentIsLocalized!,
+          parentPath,
+          parentRef,
+          ref,
+        }),
      )
    }

@@ -220,6 +236,7 @@ export const traverseFields = ({
        field: { ...tab, type: 'tab' },
        next,
        parentIsLocalized: parentIsLocalized!,
        parentPath,
        parentRef: currentParentRef,
        ref: tabRef,
      })
@@ -231,6 +248,7 @@ export const traverseFields = ({
        field: { ...tab, type: 'tab' },
        next,
        parentIsLocalized: parentIsLocalized!,
        parentPath,
        parentRef: currentParentRef,
        ref: tabRef,
      }),
@@ -254,6 +272,7 @@ export const traverseFields = ({
        isTopLevel: false,
        leavesFirst,
        parentIsLocalized: true,
        parentPath: `${parentPath}${tab.name}.`,
        parentRef: currentParentRef,
        ref: tabRef[key as keyof typeof tabRef],
      })
@@ -268,6 +287,7 @@ export const traverseFields = ({
        field: { ...tab, type: 'tab' },
        next,
        parentIsLocalized: parentIsLocalized!,
        parentPath,
        parentRef: currentParentRef,
        ref: tabRef,
      })
@@ -279,6 +299,7 @@ export const traverseFields = ({
        field: { ...tab, type: 'tab' },
        next,
        parentIsLocalized: parentIsLocalized!,
        parentPath,
        parentRef: currentParentRef,
        ref: tabRef,
      }),
@@ -296,6 +317,7 @@ export const traverseFields = ({
        isTopLevel: false,
        leavesFirst,
        parentIsLocalized: false,
        parentPath: tabHasName(tab) ? `${parentPath}${tab.name}` : parentPath,
        parentRef: currentParentRef,
        ref: tabRef,
      })
@@ -352,6 +374,7 @@ export const traverseFields = ({
        isTopLevel: false,
        leavesFirst,
        parentIsLocalized: true,
        parentPath: field.name ? `${parentPath}${field.name}` : parentPath,
        parentRef: currentParentRef,
        ref: currentRef[key as keyof typeof currentRef],
      })
@@ -426,6 +449,7 @@ export const traverseFields = ({
        isTopLevel: false,
        leavesFirst,
        parentIsLocalized,
        parentPath,
        parentRef: currentParentRef,
        ref: currentRef,
      })

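// --- Editorial sketch (not part of this diff) ----------------------------------
// The callback now also receives `parentPath`, so callers can rebuild each
// field's full dotted path. Hypothetical usage (argument names taken from
// TraverseFieldsArgs above; `collectionConfig` is assumed):
//
//   traverseFields({
//     callback: ({ field, parentPath }) => {
//       if ('name' in field && field.name) {
//         console.log(`${parentPath}${field.name}`)
//       }
//     },
//     fields: collectionConfig.fields,
//   })
// --------------------------------------------------------------------------------
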
@@ -925,11 +925,16 @@ export { HeadingFeature, type HeadingFeatureProps } from './features/heading/ser
export { HorizontalRuleFeature } from './features/horizontalRule/server/index.js'

export { IndentFeature } from './features/indent/server/index.js'
-export { AutoLinkNode } from './features/link/nodes/AutoLinkNode.js'
-export { LinkNode } from './features/link/nodes/LinkNode.js'
+export {
+  $createAutoLinkNode,
+  $isAutoLinkNode,
+  AutoLinkNode,
+} from './features/link/nodes/AutoLinkNode.js'
+export { $createLinkNode, $isLinkNode, LinkNode } from './features/link/nodes/LinkNode.js'

export type { LinkFields } from './features/link/nodes/types.js'
export { LinkFeature, type LinkFeatureServerProps } from './features/link/server/index.js'

export { ChecklistFeature } from './features/lists/checklist/server/index.js'
export { OrderedListFeature } from './features/lists/orderedList/server/index.js'

@@ -1,7 +1,13 @@
import type { MongooseAdapter } from '@payloadcms/db-mongodb'
import type { PostgresAdapter } from '@payloadcms/db-postgres/types'
import type { NextRESTClient } from 'helpers/NextRESTClient.js'
-import type { Payload, PayloadRequest, TypeWithID, ValidationError } from 'payload'
+import type {
+  DataFromCollectionSlug,
+  Payload,
+  PayloadRequest,
+  TypeWithID,
+  ValidationError,
+} from 'payload'

import {
  migrateRelationshipsV2_V3,
@@ -379,6 +385,118 @@ describe('database', () => {
      })
    })

  it('should find distinct field values of the collection', async () => {
    await payload.delete({ collection: 'posts', where: {} })
    const titles = [
      'title-1',
      'title-2',
      'title-3',
      'title-4',
      'title-5',
      'title-6',
      'title-7',
      'title-8',
      'title-9',
    ].map((title) => ({ title }))

    for (const { title } of titles) {
      // eslint-disable-next-line jest/no-conditional-in-test
      const docsCount = Math.random() > 0.5 ? 3 : Math.random() > 0.5 ? 2 : 1
      for (let i = 0; i < docsCount; i++) {
        await payload.create({ collection: 'posts', data: { title } })
      }
    }

    const res = await payload.findDistinct({
      collection: 'posts',
      field: 'title',
    })

    expect(res.values).toStrictEqual(titles)

    // const resREST = await restClient
    //   .GET('/posts/distinct', {
    //     headers: {
    //       Authorization: `Bearer ${token}`,
    //     },
    //     query: { sortOrder: 'asc', field: 'title' },
    //   })
    //   .then((res) => res.json())

    // expect(resREST.values).toEqual(titles)

    const resLimit = await payload.findDistinct({
      collection: 'posts',
      field: 'title',
      limit: 3,
    })

    expect(resLimit.values).toStrictEqual(
      ['title-1', 'title-2', 'title-3'].map((title) => ({ title })),
    )
    // count is still 9
    expect(resLimit.totalDocs).toBe(9)

    const resDesc = await payload.findDistinct({
      collection: 'posts',
      sort: '-title',
      field: 'title',
    })

    expect(resDesc.values).toStrictEqual(titles.toReversed())

    const resAscDefault = await payload.findDistinct({
      collection: 'posts',
      field: 'title',
    })

    expect(resAscDefault.values).toStrictEqual(titles)
  })

  it('should populate distinct relationships when depth>0', async () => {
    await payload.delete({ collection: 'posts', where: {} })

    const categories = ['category-1', 'category-2', 'category-3', 'category-4'].map((title) => ({
      title,
    }))

    const categoriesIDS: { category: string }[] = []

    for (const { title } of categories) {
      const doc = await payload.create({ collection: 'categories', data: { title } })
      categoriesIDS.push({ category: doc.id })
    }

    for (const { category } of categoriesIDS) {
      // eslint-disable-next-line jest/no-conditional-in-test
      const docsCount = Math.random() > 0.5 ? 3 : Math.random() > 0.5 ? 2 : 1
      for (let i = 0; i < docsCount; i++) {
        await payload.create({ collection: 'posts', data: { title: randomUUID(), category } })
      }
    }

    const resultDepth0 = await payload.findDistinct({
      collection: 'posts',
      sort: 'category.title',
      field: 'category',
    })
    expect(resultDepth0.values).toStrictEqual(categoriesIDS)
    const resultDepth1 = await payload.findDistinct({
      depth: 1,
      collection: 'posts',
      field: 'category',
      sort: 'category.title',
    })

    for (let i = 0; i < resultDepth1.values.length; i++) {
      const fromRes = resultDepth1.values[i] as any
      const id = categoriesIDS[i].category as any
      const title = categories[i]?.title
      expect(fromRes.category.title).toBe(title)
      expect(fromRes.category.id).toBe(id)
    }
  })

  describe('Compound Indexes', () => {
    beforeEach(async () => {
      await payload.delete({ collection: 'compound-indexes', where: {} })
@@ -2807,7 +2925,7 @@ describe('database', () => {
      }
    })

-  it('should update simple', async () => {
+  it('should use optimized updateOne', async () => {
    const post = await payload.create({
      collection: 'posts',
      data: {
@@ -2818,7 +2936,7 @@ describe('database', () => {
        arrayWithIDs: [{ text: 'some text' }],
      },
    })
-    const res = await payload.db.updateOne({
+    const res = (await payload.db.updateOne({
      where: { id: { equals: post.id } },
      data: {
        title: 'hello updated',
@@ -2826,14 +2944,89 @@ describe('database', () => {
        tab: { text: 'in tab updated' },
      },
      collection: 'posts',
-    })
+    })) as unknown as DataFromCollectionSlug<'posts'>

    expect(res.title).toBe('hello updated')
    expect(res.text).toBe('other text (should not be nuked)')
-    expect(res.group.text).toBe('in group updated')
-    expect(res.tab.text).toBe('in tab updated')
+    expect(res.group?.text).toBe('in group updated')
+    expect(res.tab?.text).toBe('in tab updated')
    expect(res.arrayWithIDs).toHaveLength(1)
-    expect(res.arrayWithIDs[0].text).toBe('some text')
+    expect(res.arrayWithIDs?.[0]?.text).toBe('some text')
  })

  it('should use optimized updateMany', async () => {
    const post1 = await payload.create({
      collection: 'posts',
      data: {
        text: 'other text (should not be nuked)',
        title: 'hello',
        group: { text: 'in group' },
        tab: { text: 'in tab' },
        arrayWithIDs: [{ text: 'some text' }],
      },
    })
    const post2 = await payload.create({
      collection: 'posts',
      data: {
        text: 'other text 2 (should not be nuked)',
        title: 'hello',
        group: { text: 'in group' },
        tab: { text: 'in tab' },
        arrayWithIDs: [{ text: 'some text' }],
      },
    })

    const res = (await payload.db.updateMany({
      where: { id: { in: [post1.id, post2.id] } },
      data: {
        title: 'hello updated',
        group: { text: 'in group updated' },
        tab: { text: 'in tab updated' },
      },
      collection: 'posts',
    })) as unknown as Array<DataFromCollectionSlug<'posts'>>

    expect(res).toHaveLength(2)
    const resPost1 = res?.find((r) => r.id === post1.id)
    const resPost2 = res?.find((r) => r.id === post2.id)
    expect(resPost1?.text).toBe('other text (should not be nuked)')
    expect(resPost2?.text).toBe('other text 2 (should not be nuked)')

    for (const post of res) {
      expect(post.title).toBe('hello updated')
      expect(post.group?.text).toBe('in group updated')
      expect(post.tab?.text).toBe('in tab updated')
      expect(post.arrayWithIDs).toHaveLength(1)
      expect(post.arrayWithIDs?.[0]?.text).toBe('some text')
    }
  })

  it('should allow incremental number update', async () => {
    const post = await payload.create({ collection: 'posts', data: { number: 1, title: 'post' } })

    const res = await payload.db.updateOne({
      data: {
        number: {
          $inc: 10,
        },
      },
      collection: 'posts',
      where: { id: { equals: post.id } },
    })

    expect(res.number).toBe(11)

    const res2 = await payload.db.updateOne({
      data: {
        number: {
          $inc: -3,
        },
      },
      collection: 'posts',
      where: { id: { equals: post.id } },
    })

    expect(res2.number).toBe(8)
  })

  it('should support x3 nesting blocks', async () => {

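// --- Editorial note (not part of this diff) -------------------------------------
// The `$inc` operator shown above lets the adapter increment a number atomically
// instead of doing a read-modify-write; a hypothetical counter bump:
//
//   await payload.db.updateOne({
//     collection: 'posts',
//     data: { number: { $inc: 1 } },
//     where: { id: { equals: post.id } },
//   })
// ---------------------------------------------------------------------------------
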
@@ -1,5 +1,5 @@
{
-  "id": "bf183b76-944c-4e83-bd58-4aa993885106",
+  "id": "80e7a0d2-ffb3-4f22-8597-0442b3ab8102",
  "prevId": "00000000-0000-0000-0000-000000000000",
  "version": "7",
  "dialect": "postgresql",
@@ -1,9 +1,9 @@
-import * as migration_20250707_123508 from './20250707_123508.js'
+import * as migration_20250714_201659 from './20250714_201659.js';

export const migrations = [
  {
-    up: migration_20250707_123508.up,
-    down: migration_20250707_123508.down,
-    name: '20250707_123508',
+    up: migration_20250714_201659.up,
+    down: migration_20250714_201659.down,
+    name: '20250714_201659'
  },
-]
+];
@@ -21,6 +21,25 @@ export const allDatabaseAdapters = {
|
||||
strength: 1,
|
||||
},
|
||||
})`,
|
||||
firestore: `
|
||||
import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb'
|
||||
|
||||
export const databaseAdapter = mongooseAdapter({
|
||||
...compatabilityOptions.firestore,
|
||||
url:
|
||||
process.env.DATABASE_URI ||
|
||||
process.env.MONGODB_MEMORY_SERVER_URI ||
|
||||
'mongodb://127.0.0.1/payloadtests',
|
||||
collation: {
|
||||
strength: 1,
|
||||
},
|
||||
// The following options prevent some tests from failing.
|
||||
// More work needed to get tests succeeding without these options.
|
||||
ensureIndexes: true,
|
||||
transactionOptions: {},
|
||||
disableIndexHints: false,
|
||||
useAlternativeDropDatabase: false,
|
||||
})`,
|
||||
postgres: `
|
||||
import { postgresAdapter } from '@payloadcms/db-postgres'
|
||||
|
||||
|
||||
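// --- Editorial note (not part of this diff) -------------------------------------
// The same compatibility preset can be used in an app's payload.config; a
// minimal sketch, assuming a Firestore connection string in DATABASE_URI:
//
//   import { compatabilityOptions, mongooseAdapter } from '@payloadcms/db-mongodb'
//
//   export const db = mongooseAdapter({
//     ...compatabilityOptions.firestore,
//     url: process.env.DATABASE_URI || '',
//   })
// ---------------------------------------------------------------------------------
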
@@ -13,7 +13,7 @@ const dirname = path.dirname(filename)
const writeDBAdapter = process.env.WRITE_DB_ADAPTER !== 'false'
process.env.PAYLOAD_DROP_DATABASE = process.env.PAYLOAD_DROP_DATABASE || 'true'

-if (process.env.PAYLOAD_DATABASE === 'mongodb') {
+if (process.env.PAYLOAD_DATABASE === 'mongodb' || process.env.PAYLOAD_DATABASE === 'firestore') {
  throw new Error('Not supported')
}

@@ -16,7 +16,7 @@ import { devUser } from '../credentials.js'
type ValidPath = `/${string}`
type RequestOptions = {
  auth?: boolean
-  query?: {
+  query?: { [key: string]: unknown } & {
    depth?: number
    fallbackLocale?: string
    joins?: JoinQuery

@@ -1,5 +1,8 @@
import type { Payload } from 'payload'

export function isMongoose(_payload?: Payload) {
-  return _payload?.db?.name === 'mongoose' || ['mongodb'].includes(process.env.PAYLOAD_DATABASE)
+  return (
+    _payload?.db?.name === 'mongoose' ||
+    ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE)
+  )
}

@@ -14,13 +14,17 @@ declare global {
 */
// eslint-disable-next-line no-restricted-exports
export default async () => {
  if (process.env.DATABASE_URI) {
    return
  }
  process.env.NODE_ENV = 'test'
  process.env.PAYLOAD_DROP_DATABASE = 'true'
  process.env.NODE_OPTIONS = '--no-deprecation'
  process.env.DISABLE_PAYLOAD_HMR = 'true'

  if (
-    (!process.env.PAYLOAD_DATABASE || process.env.PAYLOAD_DATABASE === 'mongodb') &&
+    (!process.env.PAYLOAD_DATABASE ||
+      ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE)) &&
    !global._mongoMemoryServer
  ) {
    console.log('Starting memory db...')

@@ -38,7 +38,7 @@ const dirname = path.dirname(filename)

type EasierChained = { id: string; relation: EasierChained }

-const mongoIt = process.env.PAYLOAD_DATABASE === 'mongodb' ? it : it.skip
+const mongoIt = ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE || '') ? it : it.skip

describe('Relationships', () => {
  beforeAll(async () => {
@@ -791,6 +791,47 @@ describe('Relationships', () => {
    expect(localized_res_2.docs).toStrictEqual([movie_1, movie_2])
  })

  it('should sort by multiple properties of a relationship', async () => {
    await payload.delete({ collection: 'directors', where: {} })
    await payload.delete({ collection: 'movies', where: {} })

    const createDirector = {
      collection: 'directors',
      data: {
        name: 'Dan',
      },
    } as const

    const director_1 = await payload.create(createDirector)
    const director_2 = await payload.create(createDirector)

    const movie_1 = await payload.create({
      collection: 'movies',
      depth: 0,
      data: { director: director_1.id, name: 'Some Movie 1' },
    })

    const movie_2 = await payload.create({
      collection: 'movies',
      depth: 0,
      data: { director: director_2.id, name: 'Some Movie 2' },
    })

    const res_1 = await payload.find({
      collection: 'movies',
      sort: ['director.name', 'director.createdAt'],
      depth: 0,
    })
    const res_2 = await payload.find({
      collection: 'movies',
      sort: ['director.name', '-director.createdAt'],
      depth: 0,
    })

    expect(res_1.docs).toStrictEqual([movie_1, movie_2])
    expect(res_2.docs).toStrictEqual([movie_2, movie_1])
  })

  it('should sort by a property of a hasMany relationship', async () => {
    const movie1 = await payload.create({
      collection: 'movies',
