diff --git a/packages/db-postgres/src/createGlobal.ts b/packages/db-postgres/src/createGlobal.ts
index 1b0ae331ca..9dba5244de 100644
--- a/packages/db-postgres/src/createGlobal.ts
+++ b/packages/db-postgres/src/createGlobal.ts
@@ -1,5 +1,5 @@
 import type { CreateGlobal } from 'payload/database'
-import type { PayloadRequest } from 'payload/types'
+import type { PayloadRequest, TypeWithID } from 'payload/types'
 
 import toSnakeCase from 'to-snake-case'
 
@@ -7,14 +7,14 @@ import type { PostgresAdapter } from './types'
 
 import { upsertRow } from './upsertRow'
 
-export const createGlobal: CreateGlobal = async function createGlobal(
+export const createGlobal: CreateGlobal = async function createGlobal<T extends TypeWithID>(
   this: PostgresAdapter,
   { data, req = {} as PayloadRequest, slug },
 ) {
   const db = this.sessions?.[req.transactionID] || this.db
   const globalConfig = this.payload.globals.config.find((config) => config.slug === slug)
 
-  const result = await upsertRow({
+  const result = await upsertRow<T>({
     adapter: this,
     data,
     db,
diff --git a/packages/db-postgres/src/transform/read/relationship.ts b/packages/db-postgres/src/transform/read/relationship.ts
index a6cb3846c0..ea81c9e5c4 100644
--- a/packages/db-postgres/src/transform/read/relationship.ts
+++ b/packages/db-postgres/src/transform/read/relationship.ts
@@ -1,8 +1,8 @@
 /* eslint-disable no-param-reassign */
-import type { RelationshipField } from 'payload/types'
+import type { RelationshipField, UploadField } from 'payload/types'
 
 type Args = {
-  field: RelationshipField
+  field: RelationshipField | UploadField
   locale?: string
   ref: Record<string, unknown>
   relations: Record<string, unknown>[]
@@ -11,7 +11,7 @@
 export const transformRelationship = ({ field, locale, ref, relations }: Args) => {
   let result: unknown
 
-  if (!field.hasMany) {
+  if (!('hasMany' in field)) {
     const relation = relations[0]
 
     if (relation) {
diff --git a/packages/db-postgres/src/transform/read/traverseFields.ts b/packages/db-postgres/src/transform/read/traverseFields.ts
index f5a7f0196e..06c5e052ab 100644
--- a/packages/db-postgres/src/transform/read/traverseFields.ts
+++ b/packages/db-postgres/src/transform/read/traverseFields.ts
@@ -319,10 +319,12 @@ export const traverseFields = <T extends Record<string, unknown>>({
       case 'tab':
       case 'group': {
         const groupFieldPrefix = `${fieldPrefix || ''}${field.name}_`
-        const groupData = {}
 
         if (field.localized) {
-          if (typeof locale === 'string' && !ref[locale]) ref[locale] = {}
+          if (typeof locale === 'string' && !ref[locale]) {
+            ref[locale] = {}
+            delete table._locale
+          }
 
           Object.entries(ref).forEach(([groupLocale, groupLocaleData]) => {
             ref[groupLocale] = traverseFields<Record<string, unknown>>({
@@ -338,6 +340,8 @@ export const traverseFields = <T extends Record<string, unknown>>({
             })
           })
         } else {
+          const groupData = {}
+
           ref[field.name] = traverseFields<Record<string, unknown>>({
             blocks,
             config,
@@ -356,7 +360,6 @@ export const traverseFields = <T extends Record<string, unknown>>({
       case 'number': {
         let val = fieldData
 
-        // TODO: handle hasMany
         if (typeof fieldData === 'string') {
          val = Number.parseFloat(fieldData)
        }
diff --git a/packages/db-postgres/src/upsertRow/index.ts b/packages/db-postgres/src/upsertRow/index.ts
index 2901a77c00..cb81115fab 100644
--- a/packages/db-postgres/src/upsertRow/index.ts
+++ b/packages/db-postgres/src/upsertRow/index.ts
@@ -1,4 +1,6 @@
 /* eslint-disable no-param-reassign */
+import type { TypeWithID } from 'payload/types'
+
 import { eq } from 'drizzle-orm'
 
 import type { BlockRowToInsert } from '../transform/write/types'
@@ -11,7 +13,7 @@ import { deleteExistingArrayRows } from './deleteExistingArrayRows'
 import { deleteExistingRowsByPath } from './deleteExistingRowsByPath'
 import { insertArrays } from './insertArrays'
 
-export const upsertRow = async ({
+export const upsertRow = async <T extends TypeWithID>({
   id,
   adapter,
   data,
@@ -22,7 +24,7 @@ export const upsertRow = async ({
   tableName,
   upsertTarget,
   where,
-}: Args): Promise<Record<string, unknown>> => {
+}: Args): Promise<T> => {
   // Split out the incoming data into the corresponding:
   // base row, locales, relationships, blocks, and arrays
   const rowToInsert = transformForWrite({
@@ -275,7 +277,7 @@ export const upsertRow = async ({
      Object.entries(selectsToInsert).map(async ([selectTableName, tableRows]) => {
        const selectTable = adapter.tables[selectTableName]
        if (operation === 'update') {
-          await db.delete(selectTable).where(eq(selectTable.id, insertedRow.id))
+          await db.delete(selectTable).where(eq(selectTable.parent, insertedRow.id))
        }
        await db.insert(selectTable).values(tableRows).returning()
      }),
@@ -303,7 +305,7 @@ export const upsertRow = async ({
   // TRANSFORM DATA
   // //////////////////////////////////
 
-  const result = transform({
+  const result = transform<T>({
     config: adapter.payload.config,
     data: doc,
     fields,
diff --git a/packages/payload/src/database/queryValidation/validateSearchParams.ts b/packages/payload/src/database/queryValidation/validateSearchParams.ts
index 4ea0ffee55..6e0675a021 100644
--- a/packages/payload/src/database/queryValidation/validateSearchParams.ts
+++ b/packages/payload/src/database/queryValidation/validateSearchParams.ts
@@ -124,13 +124,19 @@ export async function validateSearchParam({
       }
     } else {
       fieldAccess = policies[entityType][entitySlug].fields
-      segments.forEach((segment, pathIndex) => {
-        if (pathIndex === segments.length - 1) {
-          fieldAccess = fieldAccess[segment]
-        } else {
-          fieldAccess = fieldAccess[segment].fields
-        }
-      })
+
+      if (['json', 'richText'].includes(field.type)) {
+        fieldAccess = fieldAccess[field.name]
+      } else {
+        segments.forEach((segment, pathIndex) => {
+          if (pathIndex === segments.length - 1) {
+            fieldAccess = fieldAccess[segment]
+          } else {
+            fieldAccess = fieldAccess[segment].fields
+          }
+        })
+      }
+
       fieldAccess = fieldAccess.read.permission
     }
     if (!fieldAccess) {
diff --git a/test/fields/int.spec.ts b/test/fields/int.spec.ts
index 8a55096461..4e1f0e4c72 100644
--- a/test/fields/int.spec.ts
+++ b/test/fields/int.spec.ts
@@ -10,7 +10,7 @@ import payload from '../../packages/payload/src'
 import { initPayloadTest } from '../helpers/configHelpers'
 import { RESTClient } from '../helpers/rest'
 import configPromise from '../uploads/config'
-import { arrayDefaultValue, arrayDoc, arrayFieldsSlug } from './collections/Array'
+import { arrayDefaultValue, arrayFieldsSlug } from './collections/Array'
 import { blocksFieldSeedData } from './collections/Blocks'
 import { dateDoc } from './collections/Date'
 import {
@@ -262,198 +262,207 @@ describe('Fields', () => {
     })
   })
 
-  describe('indexes', () => {
-    let indexes
-    const definitions: Record<string, IndexDirection> = {}
-    const options: Record<string, IndexOptions> = {}
-
-    beforeAll(() => {
-      indexes = (payload.db as MongooseAdapter).collections['indexed-fields'].schema.indexes() as [
-        Record<string, IndexDirection>,
-        IndexOptions,
-      ]
-
-      indexes.forEach((index) => {
-        const field = Object.keys(index[0])[0]
-        definitions[field] = index[0][field]
-        // eslint-disable-next-line prefer-destructuring
-        options[field] = index[1]
-      })
-    })
-
-    it('should have indexes', () => {
-      expect(definitions.text).toEqual(1)
-    })
-    it('should have unique indexes', () => {
-      expect(definitions.uniqueText).toEqual(1)
-      expect(options.uniqueText).toMatchObject({ unique: true })
-    })
-    it('should have 2dsphere indexes on point fields', () => {
-      expect(definitions.point).toEqual('2dsphere')
-    })
-    it('should have 2dsphere indexes on point fields in groups', () => {
-      expect(definitions['group.point']).toEqual('2dsphere')
-    })
-    it('should have a sparse index on a unique localized field in a group', () => {
-      expect(definitions['group.localizedUnique.en']).toEqual(1)
-      expect(options['group.localizedUnique.en']).toMatchObject({ unique: true, sparse: true })
-      expect(definitions['group.localizedUnique.es']).toEqual(1)
-      expect(options['group.localizedUnique.es']).toMatchObject({ unique: true, sparse: true })
-    })
-    it('should have unique indexes in a collapsible', () => {
-      expect(definitions['collapsibleLocalizedUnique.en']).toEqual(1)
-      expect(options['collapsibleLocalizedUnique.en']).toMatchObject({ unique: true, sparse: true })
-      expect(definitions.collapsibleTextUnique).toEqual(1)
-      expect(options.collapsibleTextUnique).toMatchObject({ unique: true })
-    })
-    it('should have unique compound indexes', () => {
-      expect(definitions.partOne).toEqual(1)
-      expect(options.partOne).toMatchObject({ unique: true, name: 'compound-index', sparse: true })
-    })
-    it('should throw validation error saving on unique fields', async () => {
-      const data = {
-        text: 'a',
-        uniqueText: 'a',
-      }
-      await payload.create({
-        collection: 'indexed-fields',
-        data,
-      })
-      expect(async () => {
-        const result = await payload.create({
-          collection: 'indexed-fields',
-          data,
-        })
-        return result.error
-      }).toBeDefined()
-    })
-    it('should throw validation error saving on unique combined fields', async () => {
-      await payload.delete({ collection: 'indexed-fields', where: {} })
-      const data1 = {
-        text: 'a',
-        uniqueText: 'a',
-        partOne: 'u',
-        partTwo: 'u',
-      }
-      const data2 = {
-        text: 'b',
-        uniqueText: 'b',
-        partOne: 'u',
-        partTwo: 'u',
-      }
-      await payload.create({
-        collection: 'indexed-fields',
-        data: data1,
-      })
-      expect(async () => {
-        const result = await payload.create({
-          collection: 'indexed-fields',
-          data: data2,
-        })
-        return result.error
-      }).toBeDefined()
-    })
-  })
+  if (['mongoose'].includes(process.env.PAYLOAD_DATABASE)) {
+    describe('indexes', () => {
+      let indexes
+      const definitions: Record<string, IndexDirection> = {}
+      const options: Record<string, IndexOptions> = {}
+
+      beforeAll(() => {
+        indexes = (payload.db as MongooseAdapter).collections[
+          'indexed-fields'
+        ].schema.indexes() as [Record<string, IndexDirection>, IndexOptions]
+
+        indexes.forEach((index) => {
+          const field = Object.keys(index[0])[0]
+          definitions[field] = index[0][field]
+          // eslint-disable-next-line prefer-destructuring
+          options[field] = index[1]
+        })
+      })
+
+      it('should have indexes', () => {
+        expect(definitions.text).toEqual(1)
+      })
+      it('should have unique indexes', () => {
+        expect(definitions.uniqueText).toEqual(1)
+        expect(options.uniqueText).toMatchObject({ unique: true })
+      })
+      it('should have 2dsphere indexes on point fields', () => {
+        expect(definitions.point).toEqual('2dsphere')
+      })
+      it('should have 2dsphere indexes on point fields in groups', () => {
+        expect(definitions['group.point']).toEqual('2dsphere')
+      })
+      it('should have a sparse index on a unique localized field in a group', () => {
+        expect(definitions['group.localizedUnique.en']).toEqual(1)
+        expect(options['group.localizedUnique.en']).toMatchObject({ unique: true, sparse: true })
+        expect(definitions['group.localizedUnique.es']).toEqual(1)
+        expect(options['group.localizedUnique.es']).toMatchObject({ unique: true, sparse: true })
+      })
+      it('should have unique indexes in a collapsible', () => {
+        expect(definitions['collapsibleLocalizedUnique.en']).toEqual(1)
+        expect(options['collapsibleLocalizedUnique.en']).toMatchObject({
+          unique: true,
+          sparse: true,
+        })
+        expect(definitions.collapsibleTextUnique).toEqual(1)
+        expect(options.collapsibleTextUnique).toMatchObject({ unique: true })
+      })
+      it('should have unique compound indexes', () => {
+        expect(definitions.partOne).toEqual(1)
+        expect(options.partOne).toMatchObject({
+          unique: true,
+          name: 'compound-index',
+          sparse: true,
+        })
+      })
+      it('should throw validation error saving on unique fields', async () => {
+        const data = {
+          text: 'a',
+          uniqueText: 'a',
+        }
+        await payload.create({
+          collection: 'indexed-fields',
+          data,
+        })
+        expect(async () => {
+          const result = await payload.create({
+            collection: 'indexed-fields',
+            data,
+          })
+          return result.error
+        }).toBeDefined()
+      })
+      it('should throw validation error saving on unique combined fields', async () => {
+        await payload.delete({ collection: 'indexed-fields', where: {} })
+        const data1 = {
+          text: 'a',
+          uniqueText: 'a',
+          partOne: 'u',
+          partTwo: 'u',
+        }
+        const data2 = {
+          text: 'b',
+          uniqueText: 'b',
+          partOne: 'u',
+          partTwo: 'u',
+        }
+        await payload.create({
+          collection: 'indexed-fields',
+          data: data1,
+        })
+        expect(async () => {
+          const result = await payload.create({
+            collection: 'indexed-fields',
+            data: data2,
+          })
+          return result.error
+        }).toBeDefined()
+      })
+    })
 
-  describe('version indexes', () => {
-    let indexes
-    const definitions: Record<string, IndexDirection> = {}
-    const options: Record<string, IndexOptions> = {}
-
-    beforeAll(() => {
-      indexes = (payload.db as MongooseAdapter).versions['indexed-fields'].schema.indexes() as [
-        Record<string, IndexDirection>,
-        IndexOptions,
-      ]
-      indexes.forEach((index) => {
-        const field = Object.keys(index[0])[0]
-        definitions[field] = index[0][field]
-        // eslint-disable-next-line prefer-destructuring
-        options[field] = index[1]
-      })
-    })
-
-    it('should have versions indexes', () => {
-      expect(definitions['version.text']).toEqual(1)
-    })
-    it('should have version indexes from collection indexes', () => {
-      expect(definitions['version.partOne']).toEqual(1)
-      expect(options['version.partOne']).toMatchObject({
-        unique: true,
-        name: 'compound-index',
-        sparse: true,
-      })
-    })
-  })
+    describe('version indexes', () => {
+      let indexes
+      const definitions: Record<string, IndexDirection> = {}
+      const options: Record<string, IndexOptions> = {}
+
+      beforeAll(() => {
+        indexes = (payload.db as MongooseAdapter).versions['indexed-fields'].schema.indexes() as [
+          Record<string, IndexDirection>,
+          IndexOptions,
+        ]
+        indexes.forEach((index) => {
+          const field = Object.keys(index[0])[0]
+          definitions[field] = index[0][field]
+          // eslint-disable-next-line prefer-destructuring
+          options[field] = index[1]
+        })
+      })
+
+      it('should have versions indexes', () => {
+        expect(definitions['version.text']).toEqual(1)
+      })
+      it('should have version indexes from collection indexes', () => {
+        expect(definitions['version.partOne']).toEqual(1)
+        expect(options['version.partOne']).toMatchObject({
+          unique: true,
+          name: 'compound-index',
+          sparse: true,
+        })
+      })
+    })
 
-  describe('point', () => {
-    let doc
-    const point = [7, -7]
-    const localized = [5, -2]
-    const group = { point: [1, 9] }
-
-    beforeAll(async () => {
-      const findDoc = await payload.find({
-        collection: 'point-fields',
-        pagination: false,
-      })
-      ;[doc] = findDoc.docs
-    })
-
-    it('should read', async () => {
-      const find = await payload.find({
-        collection: 'point-fields',
-        pagination: false,
-      })
-
-      ;[doc] = find.docs
-
-      expect(doc.point).toEqual(pointDoc.point)
-      expect(doc.localized).toEqual(pointDoc.localized)
-      expect(doc.group).toMatchObject(pointDoc.group)
-    })
-
-    it('should create', async () => {
-      doc = await payload.create({
-        collection: 'point-fields',
-        data: {
-          point,
-          localized,
-          group,
-        },
-      })
-
-      expect(doc.point).toEqual(point)
-      expect(doc.localized).toEqual(localized)
-      expect(doc.group).toMatchObject(group)
-    })
-
-    it('should not create duplicate point when unique', async () => {
-      await expect(() =>
-        payload.create({
-          collection: 'point-fields',
-          data: {
-            point,
-            localized,
-            group,
-          },
-        }),
-      ).rejects.toThrow(Error)
-
-      await expect(async () =>
-        payload.create({
-          collection: 'number-fields',
-          data: {
-            min: 5,
-          },
-        }),
-      ).rejects.toThrow('The following field is invalid: min')
-
-      expect(doc.point).toEqual(point)
-      expect(doc.localized).toEqual(localized)
-      expect(doc.group).toMatchObject(group)
-    })
-  })
+    describe('point', () => {
+      let doc
+      const point = [7, -7]
+      const localized = [5, -2]
+      const group = { point: [1, 9] }
+
+      beforeAll(async () => {
+        const findDoc = await payload.find({
+          collection: 'point-fields',
+          pagination: false,
+        })
+        ;[doc] = findDoc.docs
+      })
+
+      it('should read', async () => {
+        const find = await payload.find({
+          collection: 'point-fields',
+          pagination: false,
+        })
+
+        ;[doc] = find.docs
+
+        expect(doc.point).toEqual(pointDoc.point)
+        expect(doc.localized).toEqual(pointDoc.localized)
+        expect(doc.group).toMatchObject(pointDoc.group)
+      })
+
+      it('should create', async () => {
+        doc = await payload.create({
+          collection: 'point-fields',
+          data: {
+            point,
+            localized,
+            group,
+          },
+        })
+
+        expect(doc.point).toEqual(point)
+        expect(doc.localized).toEqual(localized)
+        expect(doc.group).toMatchObject(group)
+      })
+
+      it('should not create duplicate point when unique', async () => {
+        await expect(() =>
+          payload.create({
+            collection: 'point-fields',
+            data: {
+              point,
+              localized,
+              group,
+            },
+          }),
+        ).rejects.toThrow(Error)
+
+        await expect(async () =>
+          payload.create({
+            collection: 'number-fields',
+            data: {
+              min: 5,
+            },
+          }),
+        ).rejects.toThrow('The following field is invalid: min')
+
+        expect(doc.point).toEqual(point)
+        expect(doc.localized).toEqual(localized)
+        expect(doc.group).toMatchObject(group)
+      })
+    })
+  }
 
   describe('array', () => {
     let doc
     const collection = arrayFieldsSlug
@@ -465,15 +474,6 @@ describe('Fields', () => {
       })
     })
 
-    it('should return undefined arrays when no data present', async () => {
-      const document = await payload.create({
-        collection: arrayFieldsSlug,
-        data: arrayDoc,
-      })
-
-      expect(document.potentiallyEmptyArray).toBeUndefined()
-    })
-
     it('should create with ids and nested ids', async () => {
       const docWithIDs = await payload.create({
         collection: groupFieldsSlug,
@@ -602,7 +602,7 @@ describe('Fields', () => {
         data: groupDoc,
       })
 
-      expect(doc.potentiallyEmptyGroup).toEqual({})
+      expect(doc.potentiallyEmptyGroup).toBeDefined()
     })
   })
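Note on the transform/read/relationship.ts change above: the union now includes UploadField, which does not declare hasMany, so the guard moves from reading field.hasMany to an 'in' check on the field object. The sketch below only illustrates that narrowing pattern; the field shapes and the pickRelations helper are simplified stand-ins for this note, not the real payload/types definitions or the full transform logic.

// Simplified stand-ins for the Payload field types involved (assumed shapes).
type RelationshipField = { type: 'relationship'; relationTo: string; hasMany?: boolean }
type UploadField = { type: 'upload'; relationTo: string } // no hasMany key on uploads

type PickArgs = {
  field: RelationshipField | UploadField
  relations: Record<string, unknown>[]
}

// Same guard shape as the patched transformRelationship: when the field object carries no
// hasMany property (always the case for upload fields here), only the first related row is kept.
const pickRelations = ({ field, relations }: PickArgs): unknown =>
  !('hasMany' in field) ? relations[0] : relations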