chore: merge from 2.0
packages/db-mongodb/.eslintignore (new file)
@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp
packages/db-mongodb/.eslintrc.cjs (new file)
@@ -0,0 +1,15 @@
/** @type {import('prettier').Config} */
module.exports = {
  extends: ['@payloadcms'],
  overrides: [
    {
      extends: ['plugin:@typescript-eslint/disable-type-checked'],
      files: ['*.js', '*.cjs', '*.json', '*.md', '*.yml', '*.yaml'],
    },
  ],
  parserOptions: {
    project: ['./tsconfig.json'],
    tsconfigRootDir: __dirname,
  },
  root: true,
}
packages/db-mongodb/.prettierignore (new file)
@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp
packages/db-mongodb/.swcrc (new file)
@@ -0,0 +1,15 @@
{
  "$schema": "https://json.schemastore.org/swcrc",
  "sourceMaps": "inline",
  "jsc": {
    "target": "esnext",
    "parser": {
      "syntax": "typescript",
      "tsx": true,
      "dts": true
    }
  },
  "module": {
    "type": "commonjs"
  }
}
packages/db-mongodb/package.json
@@ -1,25 +1,35 @@
{
"name": "@payloadcms/db-mongodb",
"version": "0.0.1",
"description": "The officially supported MongoDB database adapter for Payload",
"main": "index.js",
"repository": "https://github.com/payloadcms/payload",
"author": "Payload CMS, Inc.",
"license": "MIT",
"scripts": {
"build": "tsc"
},
"devDependencies": {
"mongodb-memory-server": "^8.13.0",
"payload": "payloadcms/payload#build/chore/update-2.0",
"typescript": "^4.9.4"
},
"dependencies": {
"bson-objectid": "^2.0.4",
"deepmerge": "^4.3.1",
"bson-objectid": "2.0.4",
"deepmerge": "4.3.1",
"get-port": "5.1.1",
"mongoose": "6.11.4",
"mongoose-aggregate-paginate-v2": "1.0.6",
"mongoose-paginate-v2": "1.7.22",
"uuid": "^9.0.0"
}
"uuid": "9.0.0"
},
"devDependencies": {
"@payloadcms/eslint-config": "workspace:*",
"@types/mongoose-aggregate-paginate-v2": "1.0.9",
"mongodb-memory-server": "8.13.0",
"payload": "workspace:*"
},
"main": "./src/index.ts",
"types": "./src/index.ts",
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"main": "./dist/index.js",
"types": "./dist/index.d.ts"
},
"scripts": {
"build": "pnpm build:swc && pnpm build:types",
"build:swc": "swc ./src -d ./dist --config-file .swcrc",
"build:types": "tsc --emitDeclarationOnly --outDir dist"
},
"version": "0.0.1",
"repository": "https://github.com/payloadcms/payload"
}
packages/db-mongodb/src/connect.ts
@@ -1,67 +1,61 @@
/* eslint-disable @typescript-eslint/no-var-requires */
import type { ConnectOptions } from 'mongoose';
import mongoose from 'mongoose';
import type { Connect } from 'payload/dist/database/types';
import type { MongooseAdapter } from '.';
import type { ConnectOptions } from 'mongoose'
import type { Connect } from 'payload/database'

export const connect: Connect = async function connect(
this: MongooseAdapter,
payload,
) {
import mongoose from 'mongoose'

import type { MongooseAdapter } from '.'

export const connect: Connect = async function connect(this: MongooseAdapter, payload) {
if (this.url === false) {
return;
return
}

if (!payload.local && typeof this.url !== 'string') {
throw new Error('Error: missing MongoDB connection URL.');
throw new Error('Error: missing MongoDB connection URL.')
}

let urlToConnect = this.url;
let successfulConnectionMessage = 'Connected to MongoDB server successfully!';
let urlToConnect = this.url
let successfulConnectionMessage = 'Connected to MongoDB server successfully!'

const connectionOptions: ConnectOptions & { useFacet: undefined } = {
autoIndex: true,
...this.connectOptions,
useFacet: undefined,
};
}

if (process.env.NODE_ENV === 'test') {
if (process.env.PAYLOAD_TEST_MONGO_URL) {
urlToConnect = process.env.PAYLOAD_TEST_MONGO_URL;
urlToConnect = process.env.PAYLOAD_TEST_MONGO_URL
} else {
connectionOptions.dbName = 'payloadmemory';
const { MongoMemoryServer } = require('mongodb-memory-server');
const getPort = require('get-port');
connectionOptions.dbName = 'payloadmemory'
const { MongoMemoryServer } = require('mongodb-memory-server')
const getPort = require('get-port')

const port = await getPort();
const port = await getPort()
this.mongoMemoryServer = await MongoMemoryServer.create({
instance: {
dbName: 'payloadmemory',
port,
},
});
})

urlToConnect = this.mongoMemoryServer.getUri();
successfulConnectionMessage = 'Connected to in-memory MongoDB server successfully!';
urlToConnect = this.mongoMemoryServer.getUri()
successfulConnectionMessage = 'Connected to in-memory MongoDB server successfully!'
}
}

try {
this.connection = (
await mongoose.connect(urlToConnect, connectionOptions)
).connection;
this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection

if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING DATABASE ----');
await mongoose.connection.dropDatabase();
this.payload.logger.info('---- DROPPED DATABASE ----');
this.payload.logger.info('---- DROPPING DATABASE ----')
await mongoose.connection.dropDatabase()
this.payload.logger.info('---- DROPPED DATABASE ----')
}
this.payload.logger.info(successfulConnectionMessage);
this.payload.logger.info(successfulConnectionMessage)
} catch (err) {
this.payload.logger.error(
`Error: cannot connect to MongoDB. Details: ${err.message}`,
err,
);
process.exit(1);
this.payload.logger.error(`Error: cannot connect to MongoDB. Details: ${err.message}`, err)
process.exit(1)
}
};
}
packages/db-mongodb/src/create.ts
@@ -1,27 +1,29 @@
import { Create } from 'payload/dist/database/types';
import type { Document } from 'payload/types';
import { PayloadRequest } from 'payload/dist/express/types';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { Create } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import type { Document } from 'payload/types'

import type { MongooseAdapter } from '.'

import { withSession } from './withSession'

export const create: Create = async function create(
this: MongooseAdapter,
{ collection, data, req = {} as PayloadRequest },
) {
const Model = this.collections[collection];
const options = withSession(this, req.transactionID);
const Model = this.collections[collection]
const options = withSession(this, req.transactionID)

const [doc] = await Model.create([data], options);
const [doc] = await Model.create([data], options)

// doc.toJSON does not do stuff like converting ObjectIds to string, or date strings to date objects. That's why we use JSON.parse/stringify here
const result: Document = JSON.parse(JSON.stringify(doc));
const verificationToken = doc._verificationToken;
const result: Document = JSON.parse(JSON.stringify(doc))
const verificationToken = doc._verificationToken

// custom id type reset
result.id = result._id;
result.id = result._id
if (verificationToken) {
result._verificationToken = verificationToken;
result._verificationToken = verificationToken
}

return result;
};
return result
}
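The create operation above resolves its Mongoose options through withSession, a helper that is not part of this diff. A minimal sketch of what such a helper could look like, assuming sessions opened by beginTransaction are stored on the adapter keyed by transactionID (hypothetical, for illustration only):

import type { ClientSession } from 'mongoose'

import type { MongooseAdapter } from '.'

// Hypothetical sketch: return a previously opened ClientSession (if any) in a shape
// that can be spread into Mongoose create/query options, e.g. { ...withSession(this, id) }.
export function withSession(
  adapter: MongooseAdapter,
  transactionID?: number | string,
): { session?: ClientSession } {
  const session = transactionID !== undefined ? adapter.sessions[transactionID] : undefined
  return session ? { session } : {}
}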
packages/db-mongodb/src/createGlobal.ts
@@ -1,27 +1,29 @@
import { PayloadRequest } from 'payload/types';
import { CreateGlobal } from 'payload/dist/database/types';
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
import { withSession } from './withSession';
import type { MongooseAdapter } from '.';
import type { CreateGlobal } from 'payload/database'
import type { PayloadRequest } from 'payload/types'

import type { MongooseAdapter } from '.'

import sanitizeInternalFields from './utilities/sanitizeInternalFields'
import { withSession } from './withSession'

export const createGlobal: CreateGlobal = async function createGlobal(
this: MongooseAdapter,
{ data, slug, req = {} as PayloadRequest },
{ data, req = {} as PayloadRequest, slug },
) {
const Model = this.globals;
const Model = this.globals
const global = {
globalType: slug,
...data,
};
const options = withSession(this, req.transactionID);
}
const options = withSession(this, req.transactionID)

let [result] = (await Model.create([global], options)) as any;
let [result] = (await Model.create([global], options)) as any

result = JSON.parse(JSON.stringify(result));
result = JSON.parse(JSON.stringify(result))

// custom id type reset
result.id = result._id;
result = sanitizeInternalFields(result);
result.id = result._id
result = sanitizeInternalFields(result)

return result;
};
return result
}
packages/db-mongodb/src/createVersion.ts
@@ -1,38 +1,40 @@
import type { CreateVersion } from 'payload/dist/database/types';
import { PayloadRequest } from 'payload/dist/express/types';
import type { Document } from 'payload/types';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { CreateVersion } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import type { Document } from 'payload/types'

import type { MongooseAdapter } from '.'

import { withSession } from './withSession'

export const createVersion: CreateVersion = async function createVersion(
this: MongooseAdapter,
{
collectionSlug,
parent,
versionData,
autosave,
collectionSlug,
createdAt,
updatedAt,
parent,
req = {} as PayloadRequest,
updatedAt,
versionData,
},
) {
const VersionModel = this.versions[collectionSlug];
const options = withSession(this, req.transactionID);
const VersionModel = this.versions[collectionSlug]
const options = withSession(this, req.transactionID)

const [doc] = await VersionModel.create(
[
{
parent,
version: versionData,
latest: true,
autosave,
createdAt,
parent,
updatedAt,
version: versionData,
},
],
options,
req,
);
)

await VersionModel.updateMany({
$and: [
@@ -58,9 +60,9 @@ export const createVersion: CreateVersion = async function createVersion(
const verificationToken = doc._verificationToken;

// custom id type reset
result.id = result._id;
result.id = result._id
if (verificationToken) {
result._verificationToken = verificationToken;
result._verificationToken = verificationToken
}
return result;
};
return result
}
packages/db-mongodb/src/deleteMany.ts
@@ -1,20 +1,24 @@
import { DeleteMany } from 'payload/dist/database/types';
import { PayloadRequest } from 'payload/dist/express/types';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { DeleteMany } from 'payload/database'
import type { PayloadRequest } from 'payload/types'

export const deleteMany: DeleteMany = async function deleteMany(this: MongooseAdapter,
{ collection, where, req = {} as PayloadRequest }) {
const Model = this.collections[collection];
import type { MongooseAdapter } from '.'

import { withSession } from './withSession'

export const deleteMany: DeleteMany = async function deleteMany(
this: MongooseAdapter,
{ collection, req = {} as PayloadRequest, where },
) {
const Model = this.collections[collection]
const options = {
...withSession(this, req.transactionID),
lean: true,
};
}

const query = await Model.buildQuery({
payload: this.payload,
where,
});
})

await Model.deleteMany(query, options);
};
await Model.deleteMany(query, options)
}
packages/db-mongodb/src/deleteOne.ts
@@ -1,29 +1,31 @@
import { DeleteOne } from 'payload/dist/database/types';
import type { Document } from 'payload/types';
import { PayloadRequest } from 'payload/dist/express/types';
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { DeleteOne } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import type { Document } from 'payload/types'

import type { MongooseAdapter } from '.'

import sanitizeInternalFields from './utilities/sanitizeInternalFields'
import { withSession } from './withSession'

export const deleteOne: DeleteOne = async function deleteOne(
this: MongooseAdapter,
{ collection, where, req = {} as PayloadRequest },
{ collection, req = {} as PayloadRequest, where },
) {
const Model = this.collections[collection];
const options = withSession(this, req.transactionID);
const Model = this.collections[collection]
const options = withSession(this, req.transactionID)

const query = await Model.buildQuery({
payload: this.payload,
where,
});
})

const doc = await Model.findOneAndDelete(query, options).lean();
const doc = await Model.findOneAndDelete(query, options).lean()

let result: Document = JSON.parse(JSON.stringify(doc));
let result: Document = JSON.parse(JSON.stringify(doc))

// custom id type reset
result.id = result._id;
result = sanitizeInternalFields(result);
result.id = result._id
result = sanitizeInternalFields(result)

return result;
};
return result
}
packages/db-mongodb/src/deleteVersions.ts
@@ -1,21 +1,25 @@
import { DeleteVersions } from 'payload/dist/database/types';
import { PayloadRequest } from 'payload/dist/express/types';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { DeleteVersions } from 'payload/database'
import type { PayloadRequest } from 'payload/types'

export const deleteVersions: DeleteVersions = async function deleteVersions(this: MongooseAdapter,
{ collection, where, locale, req = {} as PayloadRequest }) {
const VersionsModel = this.versions[collection];
import type { MongooseAdapter } from '.'

import { withSession } from './withSession'

export const deleteVersions: DeleteVersions = async function deleteVersions(
this: MongooseAdapter,
{ collection, locale, req = {} as PayloadRequest, where },
) {
const VersionsModel = this.versions[collection]
const options = {
...withSession(this, req.transactionID),
lean: true,
};
}

const query = await VersionsModel.buildQuery({
payload: this.payload,
locale,
payload: this.payload,
where,
});
})

await VersionsModel.deleteMany(query, options);
};
await VersionsModel.deleteMany(query, options)
}
packages/db-mongodb/src/destroy.ts
@@ -1,13 +1,13 @@
import mongoose from 'mongoose';
import { Destroy } from 'payload/dist/database/types';
import { MongooseAdapter } from './index';
import type { Destroy } from 'payload/database'

export const destroy: Destroy = async function destroy(
this: MongooseAdapter,
) {
import mongoose from 'mongoose'

import type { MongooseAdapter } from './index'

export const destroy: Destroy = async function destroy(this: MongooseAdapter) {
if (this.mongoMemoryServer) {
await mongoose.connection.dropDatabase();
await mongoose.connection.close();
await this.mongoMemoryServer.stop();
await mongoose.connection.dropDatabase()
await mongoose.connection.close()
await this.mongoMemoryServer.stop()
}
};
}
packages/db-mongodb/src/find.ts
@@ -1,79 +1,73 @@
import type { PaginateOptions } from 'mongoose';
import type { Find } from 'payload/dist/database/types';
import flattenWhereToOperators from 'payload/dist/database/flattenWhereToOperators';
import { PayloadRequest } from 'payload/dist/express/types';
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
import { buildSortParam } from './queries/buildSortParam';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { PaginateOptions } from 'mongoose'
import type { Find } from 'payload/database'
import type { PayloadRequest } from 'payload/types'

import { flattenWhereToOperators } from 'payload/database'

import type { MongooseAdapter } from '.'

import { buildSortParam } from './queries/buildSortParam'
import sanitizeInternalFields from './utilities/sanitizeInternalFields'
import { withSession } from './withSession'

export const find: Find = async function find(
this: MongooseAdapter,
{
collection,
where,
page,
limit,
sort: sortArg,
locale,
pagination,
req = {} as PayloadRequest,
},
{ collection, limit, locale, page, pagination, req = {} as PayloadRequest, sort: sortArg, where },
) {
const Model = this.collections[collection];
const collectionConfig = this.payload.collections[collection].config;
const options = withSession(this, req.transactionID);
const Model = this.collections[collection]
const collectionConfig = this.payload.collections[collection].config
const options = withSession(this, req.transactionID)

let hasNearConstraint = false;
let hasNearConstraint = false

if (where) {
const constraints = flattenWhereToOperators(where);
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'));
const constraints = flattenWhereToOperators(where)
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'))
}

let sort;
let sort
if (!hasNearConstraint) {
sort = buildSortParam({
sort: sortArg || collectionConfig.defaultSort,
fields: collectionConfig.fields,
timestamps: true,
config: this.payload.config,
fields: collectionConfig.fields,
locale,
});
sort: sortArg || collectionConfig.defaultSort,
timestamps: true,
})
}

const query = await Model.buildQuery({
payload: this.payload,
locale,
payload: this.payload,
where,
});
})

const paginationOptions: PaginateOptions = {
page,
sort,
forceCountFn: hasNearConstraint,
lean: true,
leanWithId: true,
useEstimatedCount: hasNearConstraint,
forceCountFn: hasNearConstraint,
pagination,
options,
};

if (limit > 0) {
paginationOptions.limit = limit;
// limit must also be set here, it's ignored when pagination is false
paginationOptions.options.limit = limit;
page,
pagination,
sort,
useEstimatedCount: hasNearConstraint,
}

const result = await Model.paginate(query, paginationOptions);
const docs = JSON.parse(JSON.stringify(result.docs));
if (limit > 0) {
paginationOptions.limit = limit
// limit must also be set here, it's ignored when pagination is false
paginationOptions.options.limit = limit
}

const result = await Model.paginate(query, paginationOptions)
const docs = JSON.parse(JSON.stringify(result.docs))

return {
...result,
docs: docs.map((doc) => {
// eslint-disable-next-line no-param-reassign
doc.id = doc._id;
return sanitizeInternalFields(doc);
doc.id = doc._id
return sanitizeInternalFields(doc)
}),
};
};
}
}
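For context, the find method above is what backs Payload's local API reads; a hedged example of the kind of call that ends up in this adapter method (the 'posts' collection and 'title' field are made-up names):

// Example only: 'posts' and 'title' are hypothetical collection/field names.
const result = await payload.find({
  collection: 'posts',
  where: { title: { equals: 'Hello, world!' } },
  limit: 10,
  page: 1,
  sort: '-createdAt',
})
console.log(result.docs.length, result.totalDocs)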
packages/db-mongodb/src/findGlobal.ts
@@ -1,39 +1,42 @@
import { combineQueries } from 'payload/dist/database/combineQueries';
import type { FindGlobal } from 'payload/dist/database/types';
import { PayloadRequest } from 'payload/dist/express/types';
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { FindGlobal } from 'payload/database'
import type { PayloadRequest } from 'payload/types'

import { combineQueries } from 'payload/database'

import type { MongooseAdapter } from '.'

import sanitizeInternalFields from './utilities/sanitizeInternalFields'
import { withSession } from './withSession'

export const findGlobal: FindGlobal = async function findGlobal(
this: MongooseAdapter,
{ slug, locale, where, req = {} as PayloadRequest },
{ locale, req = {} as PayloadRequest, slug, where },
) {
const Model = this.globals;
const Model = this.globals
const options = {
...withSession(this, req.transactionID),
lean: true,
};
}

const query = await Model.buildQuery({
where: combineQueries({ globalType: { equals: slug } }, where),
payload: this.payload,
locale,
globalSlug: slug,
});
locale,
payload: this.payload,
where: combineQueries({ globalType: { equals: slug } }, where),
})

let doc = (await Model.findOne(query, {}, options)) as any;
let doc = (await Model.findOne(query, {}, options)) as any

if (!doc) {
return null;
return null
}
if (doc._id) {
doc.id = doc._id;
delete doc._id;
doc.id = doc._id
delete doc._id
}

doc = JSON.parse(JSON.stringify(doc));
doc = sanitizeInternalFields(doc);
doc = JSON.parse(JSON.stringify(doc))
doc = sanitizeInternalFields(doc)

return doc;
};
return doc
}
packages/db-mongodb/src/findGlobalVersions.ts
@@ -1,89 +1,92 @@
import { PaginateOptions } from 'mongoose';
import type { FindGlobalVersions } from 'payload/dist/database/types';
import flattenWhereToOperators from 'payload/dist/database/flattenWhereToOperators';
import { buildVersionGlobalFields } from 'payload/dist/versions/buildGlobalFields';
import { PayloadRequest } from 'payload/dist/express/types';
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
import type { MongooseAdapter } from '.';
import { buildSortParam } from './queries/buildSortParam';
import { withSession } from './withSession';
import type { PaginateOptions } from 'mongoose'
import type { FindGlobalVersions } from 'payload/database'
import type { PayloadRequest } from 'payload/types'

import { flattenWhereToOperators } from 'payload/database'
import { buildVersionGlobalFields } from 'payload/versions'

import type { MongooseAdapter } from '.'

import { buildSortParam } from './queries/buildSortParam'
import sanitizeInternalFields from './utilities/sanitizeInternalFields'
import { withSession } from './withSession'

export const findGlobalVersions: FindGlobalVersions = async function findGlobalVersions(
this: MongooseAdapter,
{
global,
where,
page,
limit,
sort: sortArg,
locale,
page,
pagination,
skip,
req = {} as PayloadRequest,
skip,
sort: sortArg,
where,
},
) {
const Model = this.versions[global];
const Model = this.versions[global]
const versionFields = buildVersionGlobalFields(
this.payload.globals.config.find(({ slug }) => slug === global),
);
)
const options = {
...withSession(this, req.transactionID),
skip,
limit,
};

let hasNearConstraint = false;

if (where) {
const constraints = flattenWhereToOperators(where);
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'));
skip,
}

let sort;
let hasNearConstraint = false

if (where) {
const constraints = flattenWhereToOperators(where)
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'))
}

let sort
if (!hasNearConstraint) {
sort = buildSortParam({
sort: sortArg || '-updatedAt',
fields: versionFields,
timestamps: true,
config: this.payload.config,
fields: versionFields,
locale,
});
sort: sortArg || '-updatedAt',
timestamps: true,
})
}

const query = await Model.buildQuery({
payload: this.payload,
locale,
where,
globalSlug: global,
});
locale,
payload: this.payload,
where,
})

const paginationOptions: PaginateOptions = {
page,
sort,
forceCountFn: hasNearConstraint,
lean: true,
leanWithId: true,
pagination,
offset: skip,
useEstimatedCount: hasNearConstraint,
forceCountFn: hasNearConstraint,
options,
};

if (limit > 0) {
paginationOptions.limit = limit;
// limit must also be set here, it's ignored when pagination is false
paginationOptions.options.limit = limit;
page,
pagination,
sort,
useEstimatedCount: hasNearConstraint,
}

const result = await Model.paginate(query, paginationOptions);
const docs = JSON.parse(JSON.stringify(result.docs));
if (limit > 0) {
paginationOptions.limit = limit
// limit must also be set here, it's ignored when pagination is false
paginationOptions.options.limit = limit
}

const result = await Model.paginate(query, paginationOptions)
const docs = JSON.parse(JSON.stringify(result.docs))

return {
...result,
docs: docs.map((doc) => {
// eslint-disable-next-line no-param-reassign
doc.id = doc._id;
return sanitizeInternalFields(doc);
doc.id = doc._id
return sanitizeInternalFields(doc)
}),
};
};
}
}
packages/db-mongodb/src/findOne.ts
@@ -1,38 +1,40 @@
import type { MongooseQueryOptions } from 'mongoose';
import type { FindOne } from 'payload/dist/database/types';
import type { Document } from 'payload/types';
import { PayloadRequest } from 'payload/dist/express/types';
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
import type { MongooseAdapter } from '.';
import { withSession } from './withSession';
import type { MongooseQueryOptions } from 'mongoose'
import type { FindOne } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import type { Document } from 'payload/types'

import type { MongooseAdapter } from '.'

import sanitizeInternalFields from './utilities/sanitizeInternalFields'
import { withSession } from './withSession'

export const findOne: FindOne = async function findOne(
this: MongooseAdapter,
{ collection, where, locale, req = {} as PayloadRequest },
{ collection, locale, req = {} as PayloadRequest, where },
) {
const Model = this.collections[collection];
const Model = this.collections[collection]
const options: MongooseQueryOptions = {
...withSession(this, req.transactionID),
lean: true,
};

const query = await Model.buildQuery({
payload: this.payload,
locale,
where,
});

const doc = await Model.findOne(query, {}, options);

if (!doc) {
return null;
}

let result: Document = JSON.parse(JSON.stringify(doc));
const query = await Model.buildQuery({
locale,
payload: this.payload,
where,
})

const doc = await Model.findOne(query, {}, options)

if (!doc) {
return null
}

let result: Document = JSON.parse(JSON.stringify(doc))

// custom id type reset
result.id = result._id;
result = sanitizeInternalFields(result);
result.id = result._id
result = sanitizeInternalFields(result)

return result;
};
return result
}
packages/db-mongodb/src/findVersions.ts
@@ -1,86 +1,89 @@
import { PaginateOptions } from 'mongoose';
import type { FindVersions } from 'payload/dist/database/types';
import flattenWhereToOperators from 'payload/dist/database/flattenWhereToOperators';
import { PayloadRequest } from 'payload/dist/express/types';
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
import type { MongooseAdapter } from '.';
import { buildSortParam } from './queries/buildSortParam';
import { withSession } from './withSession';
import type { PaginateOptions } from 'mongoose'
import type { FindVersions } from 'payload/database'
import type { PayloadRequest } from 'payload/types'

import { flattenWhereToOperators } from 'payload/database'

import type { MongooseAdapter } from '.'

import { buildSortParam } from './queries/buildSortParam'
import sanitizeInternalFields from './utilities/sanitizeInternalFields'
import { withSession } from './withSession'

export const findVersions: FindVersions = async function findVersions(
this: MongooseAdapter,
{
collection,
where,
page,
limit,
sort: sortArg,
locale,
page,
pagination,
skip,
req = {} as PayloadRequest,
skip,
sort: sortArg,
where,
},
) {
const Model = this.versions[collection];
const collectionConfig = this.payload.collections[collection].config;
const Model = this.versions[collection]
const collectionConfig = this.payload.collections[collection].config
const options = {
...withSession(this, req.transactionID),
skip,
limit,
};

let hasNearConstraint = false;

if (where) {
const constraints = flattenWhereToOperators(where);
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'));
skip,
}

let sort;
let hasNearConstraint = false

if (where) {
const constraints = flattenWhereToOperators(where)
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'))
}

let sort
if (!hasNearConstraint) {
sort = buildSortParam({
sort: sortArg || '-updatedAt',
fields: collectionConfig.fields,
timestamps: true,
config: this.payload.config,
fields: collectionConfig.fields,
locale,
});
sort: sortArg || '-updatedAt',
timestamps: true,
})
}

const query = await Model.buildQuery({
payload: this.payload,
locale,
payload: this.payload,
where,
});
})

const paginationOptions: PaginateOptions = {
page,
sort,
limit,
forceCountFn: hasNearConstraint,
lean: true,
leanWithId: true,
pagination,
limit,
offset: skip,
useEstimatedCount: hasNearConstraint,
forceCountFn: hasNearConstraint,
options,
};

if (limit > 0) {
paginationOptions.limit = limit;
// limit must also be set here, it's ignored when pagination is false
paginationOptions.options.limit = limit;
page,
pagination,
sort,
useEstimatedCount: hasNearConstraint,
}

const result = await Model.paginate(query, paginationOptions);
const docs = JSON.parse(JSON.stringify(result.docs));
if (limit > 0) {
paginationOptions.limit = limit
// limit must also be set here, it's ignored when pagination is false
paginationOptions.options.limit = limit
}

const result = await Model.paginate(query, paginationOptions)
const docs = JSON.parse(JSON.stringify(result.docs))

return {
...result,
docs: docs.map((doc) => {
// eslint-disable-next-line no-param-reassign
doc.id = doc._id;
return sanitizeInternalFields(doc);
doc.id = doc._id
return sanitizeInternalFields(doc)
}),
};
};
}
}
packages/db-mongodb/src/index.ts
@@ -1,108 +1,111 @@
import type { ClientSession, Connection, ConnectOptions } from 'mongoose';
import mongoose from 'mongoose';
import { createMigration } from 'payload/dist/database/migrations/createMigration';
import type { Payload } from 'payload';
import type { DatabaseAdapter } from 'payload/dist/database/types';
import { createDatabaseAdapter } from 'payload/dist/database/createAdapter';
import { connect } from './connect';
import { init } from './init';
import { webpack } from './webpack';
import { createGlobal } from './createGlobal';
import { createVersion } from './createVersion';
import { beginTransaction } from './transactions/beginTransaction';
import { rollbackTransaction } from './transactions/rollbackTransaction';
import { commitTransaction } from './transactions/commitTransaction';
import { queryDrafts } from './queryDrafts';
import { find } from './find';
import { findGlobalVersions } from './findGlobalVersions';
import { findVersions } from './findVersions';
import { create } from './create';
import { deleteOne } from './deleteOne';
import { deleteVersions } from './deleteVersions';
import { findGlobal } from './findGlobal';
import { findOne } from './findOne';
import { updateGlobal } from './updateGlobal';
import { updateOne } from './updateOne';
import { updateVersion } from './updateVersion';
import { deleteMany } from './deleteMany';
import { destroy } from './destroy';
import type { CollectionModel, GlobalModel } from './types';
import type { ClientSession, ConnectOptions, Connection } from 'mongoose'
import type { Payload } from 'payload'
import type { DatabaseAdapter } from 'payload/database'

import mongoose from 'mongoose'
import { createDatabaseAdapter } from 'payload/database'
import { createMigration } from 'payload/database'

import type { CollectionModel, GlobalModel } from './types'

import { connect } from './connect'
import { create } from './create'
import { createGlobal } from './createGlobal'
import { createVersion } from './createVersion'
import { deleteMany } from './deleteMany'
import { deleteOne } from './deleteOne'
import { deleteVersions } from './deleteVersions'
import { destroy } from './destroy'
import { find } from './find'
import { findGlobal } from './findGlobal'
import { findGlobalVersions } from './findGlobalVersions'
import { findOne } from './findOne'
import { findVersions } from './findVersions'
import { init } from './init'
import { queryDrafts } from './queryDrafts'
import { beginTransaction } from './transactions/beginTransaction'
import { commitTransaction } from './transactions/commitTransaction'
import { rollbackTransaction } from './transactions/rollbackTransaction'
import { updateGlobal } from './updateGlobal'
import { updateOne } from './updateOne'
import { updateVersion } from './updateVersion'
import { webpack } from './webpack'

export interface Args {
/** The URL to connect to MongoDB or false to start payload and prevent connecting */
url: string | false;
migrationDir?: string;
/** Set to false to disable auto-pluralization of collection names, Defaults to true */
autoPluralization?: boolean
/** Extra configuration options */
connectOptions?: ConnectOptions & {
/** Set false to disable $facet aggregation in non-supporting databases, Defaults to true */
useFacet?: boolean;
};
/** Set to false to disable auto-pluralization of collection names, Defaults to true */
autoPluralization?: boolean;
useFacet?: boolean
}
migrationDir?: string
/** The URL to connect to MongoDB or false to start payload and prevent connecting */
url: false | string
}

export type MongooseAdapter = DatabaseAdapter &
Args & {
mongoMemoryServer: any;
collections: {
[slug: string]: CollectionModel;
};
globals: GlobalModel;
[slug: string]: CollectionModel
}
connection: Connection
globals: GlobalModel
mongoMemoryServer: any
sessions: Record<number | string, ClientSession>
versions: {
[slug: string]: CollectionModel
}
sessions: Record<string | number, ClientSession>
connection: Connection
}

type MongooseAdapterResult = (args: { payload: Payload }) => MongooseAdapter

export function mongooseAdapter({
url,
autoPluralization = true,
connectOptions,
migrationDir,
autoPluralization = true,
url,
}: Args): MongooseAdapterResult {
function adapter({ payload }: { payload: Payload }) {
mongoose.set('strictQuery', false);
mongoose.set('strictQuery', false)

return createDatabaseAdapter<MongooseAdapter>({
payload,
migrationDir,
connection: undefined,
mongoMemoryServer: undefined,
sessions: {},
url,
connectOptions: connectOptions || {},
autoPluralization,
globals: undefined,
collections: {},
versions: {},
connect,
destroy,
init,
webpack,
createMigration,
beginTransaction,
rollbackTransaction,
collections: {},
commitTransaction,
queryDrafts,
findOne,
find,
connect,
connectOptions: connectOptions || {},
connection: undefined,
create,
updateOne,
deleteOne,
deleteMany,
findGlobal,
createGlobal,
updateGlobal,
findVersions,
findGlobalVersions,
createMigration,
createVersion,
updateVersion,
deleteMany,
deleteOne,
deleteVersions,
});
destroy,
find,
findGlobal,
findGlobalVersions,
findOne,
findVersions,
globals: undefined,
init,
migrationDir,
mongoMemoryServer: undefined,
payload,
queryDrafts,
rollbackTransaction,
sessions: {},
updateGlobal,
updateOne,
updateVersion,
url,
versions: {},
webpack,
})
}

return adapter;
return adapter
}
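As a usage sketch (not part of this commit), the exported mongooseAdapter is meant to be passed to the db property of the 2.0 Payload config; the DATABASE_URI env var name below is an assumption:

import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { buildConfig } from 'payload/config'

export default buildConfig({
  collections: [],
  db: mongooseAdapter({
    autoPluralization: true,
    // DATABASE_URI is an assumed env var holding the MongoDB connection string
    url: process.env.DATABASE_URI ?? false,
  }),
})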
packages/db-mongodb/src/init.ts
@@ -1,121 +1,118 @@
/* eslint-disable no-param-reassign */
import mongoose, { PaginateOptions } from 'mongoose';
import paginate from 'mongoose-paginate-v2';
import { buildVersionCollectionFields } from 'payload/dist/versions/buildCollectionFields';
import { SanitizedCollectionConfig } from 'payload/dist/collections/config/types';
import { getVersionsModelName } from 'payload/dist/versions/getVersionsModelName';
import { buildVersionGlobalFields } from 'payload/dist/versions/buildGlobalFields';
import type { Init } from 'payload/dist/database/types';
import getBuildQueryPlugin from './queries/buildQuery';
import buildCollectionSchema from './models/buildCollectionSchema';
import buildSchema from './models/buildSchema';
import type { MongooseAdapter } from '.';
import { buildGlobalModel } from './models/buildGlobalModel';
import { CollectionModel } from './types';
import type { PaginateOptions } from 'mongoose'
import type { Init } from 'payload/database'
import type { SanitizedCollectionConfig } from 'payload/types'

export const init: Init = async function init(
this: MongooseAdapter,
) {
this.payload.config.collections.forEach(
(collection: SanitizedCollectionConfig) => {
const schema = buildCollectionSchema(collection, this.payload.config);
import mongoose from 'mongoose'
import mongooseAggregatePaginate from 'mongoose-aggregate-paginate-v2'
import paginate from 'mongoose-paginate-v2'
import { buildVersionGlobalFields } from 'payload/versions'
import { buildVersionCollectionFields } from 'payload/versions'
import { getVersionsModelName } from 'payload/versions'

if (collection.versions) {
const versionModelName = getVersionsModelName(collection);
import type { MongooseAdapter } from '.'
import type { CollectionModel } from './types'

const versionCollectionFields = buildVersionCollectionFields(collection);
import buildCollectionSchema from './models/buildCollectionSchema'
import { buildGlobalModel } from './models/buildGlobalModel'
import buildSchema from './models/buildSchema'
import getBuildQueryPlugin from './queries/buildQuery'

const versionSchema = buildSchema(
this.payload.config,
versionCollectionFields,
{
disableUnique: true,
draftsEnabled: true,
options: {
timestamps: false,
minimize: false,
},
},
);
export const init: Init = async function init(this: MongooseAdapter) {
this.payload.config.collections.forEach((collection: SanitizedCollectionConfig) => {
const schema = buildCollectionSchema(collection, this.payload.config)

if (collection.indexes) {
collection.indexes.forEach((index) => {
// prefix 'version.' to each field in the index
const versionIndex = {
fields: {},
options: index.options,
};
Object.entries(index.fields)
.forEach(([key, value]) => {
versionIndex.fields[`version.${key}`] = value;
});
versionSchema.index(versionIndex.fields, versionIndex.options);
});
}
if (collection.versions) {
const versionModelName = getVersionsModelName(collection)

versionSchema.plugin<any, PaginateOptions>(paginate, { useEstimatedCount: true })
.plugin(
getBuildQueryPlugin({
collectionSlug: collection.slug,
versionsFields: versionCollectionFields,
}),
);
const versionCollectionFields = buildVersionCollectionFields(collection)

const model = mongoose.model(
versionModelName,
versionSchema,
versionModelName,
) as CollectionModel;
// this.payload.versions[collection.slug] = model;
this.versions[collection.slug] = model;
const versionSchema = buildSchema(this.payload.config, versionCollectionFields, {
disableUnique: true,
draftsEnabled: true,
options: {
minimize: false,
timestamps: false,
},
})

if (collection.indexes) {
collection.indexes.forEach((index) => {
// prefix 'version.' to each field in the index
const versionIndex = {
fields: {},
options: index.options,
}
Object.entries(index.fields).forEach(([key, value]) => {
versionIndex.fields[`version.${key}`] = value
})
versionSchema.index(versionIndex.fields, versionIndex.options)
})
}

versionSchema.plugin<any, PaginateOptions>(paginate, { useEstimatedCount: true }).plugin(
getBuildQueryPlugin({
collectionSlug: collection.slug,
versionsFields: versionCollectionFields,
}),
)

if (collection.versions?.drafts) {
versionSchema.plugin(mongooseAggregatePaginate)
}

const model = mongoose.model(
collection.slug,
schema,
this.autoPluralization === true ? undefined : collection.slug,
) as CollectionModel;
this.collections[collection.slug] = model;
versionModelName,
versionSchema,
versionModelName,
) as CollectionModel
// this.payload.versions[collection.slug] = model;
this.versions[collection.slug] = model
}

this.payload.collections[collection.slug] = {
config: collection,
};
},
);
const model = mongoose.model(
collection.slug,
schema,
this.autoPluralization === true ? undefined : collection.slug,
) as CollectionModel
this.collections[collection.slug] = model

const model = buildGlobalModel(this.payload.config);
this.globals = model;
// TS expect error only needed until we launch 2.0.0
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
this.payload.collections[collection.slug] = {
config: collection,
}
})

const model = buildGlobalModel(this.payload.config)
this.globals = model

this.payload.config.globals.forEach((global) => {
if (global.versions) {
const versionModelName = getVersionsModelName(global);
const versionModelName = getVersionsModelName(global)

const versionGlobalFields = buildVersionGlobalFields(global);
const versionGlobalFields = buildVersionGlobalFields(global)

const versionSchema = buildSchema(
this.payload.config,
versionGlobalFields,
{
indexSortableFields: this.payload.config.indexSortableFields,
disableUnique: true,
draftsEnabled: true,
options: {
timestamps: false,
minimize: false,
},
const versionSchema = buildSchema(this.payload.config, versionGlobalFields, {
disableUnique: true,
draftsEnabled: true,
indexSortableFields: this.payload.config.indexSortableFields,
options: {
minimize: false,
timestamps: false,
},
);
})

versionSchema
.plugin<any, PaginateOptions>(paginate, { useEstimatedCount: true })
.plugin(getBuildQueryPlugin({ versionsFields: versionGlobalFields }));
.plugin(getBuildQueryPlugin({ versionsFields: versionGlobalFields }))

const versionsModel = mongoose.model(
versionModelName,
versionSchema,
versionModelName,
) as CollectionModel;
this.versions[global.slug] = versionsModel;
) as CollectionModel
this.versions[global.slug] = versionsModel
}
});
};
})
}
@@ -1 +1 @@
exports.mongooseAdapter = () => ({});
exports.mongooseAdapter = () => ({})
packages/db-mongodb/src/models/buildCollectionSchema.ts
@@ -1,38 +1,41 @@
import paginate from 'mongoose-paginate-v2';
import { PaginateOptions, Schema } from 'mongoose';
import { SanitizedConfig } from 'payload/dist/config/types';
import { SanitizedCollectionConfig } from 'payload/dist/collections/config/types';
import getBuildQueryPlugin from '../queries/buildQuery';
import buildSchema from './buildSchema';
import type { PaginateOptions, Schema } from 'mongoose'
import type { SanitizedConfig } from 'payload/config'
import type { SanitizedCollectionConfig } from 'payload/types'

const buildCollectionSchema = (collection: SanitizedCollectionConfig, config: SanitizedConfig, schemaOptions = {}): Schema => {
const schema = buildSchema(
config,
collection.fields,
{
draftsEnabled: Boolean(typeof collection?.versions === 'object' && collection.versions.drafts),
options: {
timestamps: collection.timestamps !== false,
minimize: false,
...schemaOptions,
},
indexSortableFields: config.indexSortableFields,
import paginate from 'mongoose-paginate-v2'

import getBuildQueryPlugin from '../queries/buildQuery'
import buildSchema from './buildSchema'

const buildCollectionSchema = (
collection: SanitizedCollectionConfig,
config: SanitizedConfig,
schemaOptions = {},
): Schema => {
const schema = buildSchema(config, collection.fields, {
draftsEnabled: Boolean(typeof collection?.versions === 'object' && collection.versions.drafts),
indexSortableFields: config.indexSortableFields,
options: {
minimize: false,
timestamps: collection.timestamps !== false,
...schemaOptions,
},
);
})

if (config.indexSortableFields && collection.timestamps !== false) {
schema.index({ updatedAt: 1 });
schema.index({ createdAt: 1 });
schema.index({ updatedAt: 1 })
schema.index({ createdAt: 1 })
}
if (collection.indexes) {
collection.indexes.forEach((index) => {
schema.index(index.fields, index.options);
});
schema.index(index.fields, index.options)
})
}
schema.plugin<any, PaginateOptions>(paginate, { useEstimatedCount: true })
.plugin(getBuildQueryPlugin({ collectionSlug: collection.slug }));
schema
.plugin<any, PaginateOptions>(paginate, { useEstimatedCount: true })
.plugin(getBuildQueryPlugin({ collectionSlug: collection.slug }))

return schema;
};
return schema
}

export default buildCollectionSchema;
export default buildCollectionSchema
packages/db-mongodb/src/models/buildGlobalModel.ts
@@ -1,32 +1,34 @@
import mongoose from 'mongoose';
import { SanitizedConfig } from 'payload/dist/config/types';
import buildSchema from './buildSchema';
import getBuildQueryPlugin from '../queries/buildQuery';
import type { GlobalModel } from '../types';
import type { SanitizedConfig } from 'payload/config'

import mongoose from 'mongoose'

import type { GlobalModel } from '../types'

import getBuildQueryPlugin from '../queries/buildQuery'
import buildSchema from './buildSchema'

export const buildGlobalModel = (config: SanitizedConfig): GlobalModel | null => {
if (config.globals && config.globals.length > 0) {
const globalsSchema = new mongoose.Schema({}, { discriminatorKey: 'globalType', timestamps: true, minimize: false });
const globalsSchema = new mongoose.Schema(
{},
{ discriminatorKey: 'globalType', minimize: false, timestamps: true },
)

globalsSchema.plugin(getBuildQueryPlugin());
globalsSchema.plugin(getBuildQueryPlugin())

const Globals = mongoose.model('globals', globalsSchema, 'globals') as unknown as GlobalModel;
const Globals = mongoose.model('globals', globalsSchema, 'globals') as unknown as GlobalModel

Object.values(config.globals).forEach((globalConfig) => {
const globalSchema = buildSchema(
config,
globalConfig.fields,
{
options: {
minimize: false,
},
const globalSchema = buildSchema(config, globalConfig.fields, {
options: {
minimize: false,
},
);
Globals.discriminator(globalConfig.slug, globalSchema);
});
})
Globals.discriminator(globalConfig.slug, globalSchema)
})

return Globals;
return Globals
}

return null;
};
return null
}
packages/db-mongodb/src/models/buildSchema.ts
@@ -2,9 +2,9 @@
/* eslint-disable class-methods-use-this */
/* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable no-use-before-define */
import { IndexOptions, Schema, SchemaOptions, SchemaTypeOptions } from 'mongoose';
import { SanitizedConfig, SanitizedLocalizationConfig } from 'payload/dist/config/types';
import {
import type { IndexOptions, SchemaOptions, SchemaTypeOptions } from 'mongoose'
import type { SanitizedConfig, SanitizedLocalizationConfig } from 'payload/config'
import type {
ArrayField,
Block,
BlockField,
@@ -14,13 +14,8 @@ import {
DateField,
EmailField,
Field,
FieldAffectingData,
fieldAffectsData,
fieldIsLocalized,
fieldIsPresentationalOnly,
GroupField,
JSONField,
NonPresentationalField,
NumberField,
PointField,
RadioField,
@@ -28,401 +23,480 @@ import {
RichTextField,
RowField,
SelectField,
Tab,
tabHasName,
TabsField,
TextareaField,
TextField,
UnnamedTab,
TextareaField,
UploadField,
} from 'payload/dist/fields/config/types';
} from 'payload/types'
import type { FieldAffectingData, NonPresentationalField, Tab, UnnamedTab } from 'payload/types'

import { Schema } from 'mongoose'
import {
fieldAffectsData,
fieldIsLocalized,
fieldIsPresentationalOnly,
tabHasName,
} from 'payload/types'

export type BuildSchemaOptions = {
options?: SchemaOptions
allowIDField?: boolean
disableUnique?: boolean
draftsEnabled?: boolean
indexSortableFields?: boolean
options?: SchemaOptions
}

type FieldSchemaGenerator = (field: Field, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions) => void;
type FieldSchemaGenerator = (
field: Field,
schema: Schema,
config: SanitizedConfig,
buildSchemaOptions: BuildSchemaOptions,
) => void

const formatBaseSchema = (field: FieldAffectingData, buildSchemaOptions: BuildSchemaOptions) => {
const { disableUnique, draftsEnabled, indexSortableFields } = buildSchemaOptions;
const { disableUnique, draftsEnabled, indexSortableFields } = buildSchemaOptions
const schema: SchemaTypeOptions<unknown> = {
unique: (!disableUnique && field.unique) || false,
required: false,
index: field.index || (!disableUnique && field.unique) || indexSortableFields || false,
};
required: false,
unique: (!disableUnique && field.unique) || false,
}

if ((schema.unique && (field.localized || draftsEnabled))) {
schema.sparse = true;
if (schema.unique && (field.localized || draftsEnabled)) {
schema.sparse = true
}

if (field.hidden) {
schema.hidden = true;
schema.hidden = true
}

return schema;
};
return schema
}

const localizeSchema = (entity: NonPresentationalField | Tab, schema, localization: false | SanitizedLocalizationConfig) => {
const localizeSchema = (
entity: NonPresentationalField | Tab,
schema,
localization: SanitizedLocalizationConfig | false,
) => {
if (fieldIsLocalized(entity) && localization && Array.isArray(localization.locales)) {
return {
type: localization.localeCodes.reduce((localeSchema, locale) => ({
...localeSchema,
[locale]: schema,
}), {
_id: false,
}),
localized: true,
};
type: localization.localeCodes.reduce(
(localeSchema, locale) => ({
...localeSchema,
[locale]: schema,
}),
{
_id: false,
},
),
}
}
return schema;
};
return schema
}

const buildSchema = (config: SanitizedConfig, configFields: Field[], buildSchemaOptions: BuildSchemaOptions = {}): Schema => {
const { allowIDField, options } = buildSchemaOptions;
let fields = {};
const buildSchema = (
config: SanitizedConfig,
configFields: Field[],
buildSchemaOptions: BuildSchemaOptions = {},
): Schema => {
const { allowIDField, options } = buildSchemaOptions
let fields = {}

let schemaFields = configFields;
let schemaFields = configFields

if (!allowIDField) {
const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id');
const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id')
if (idField) {
fields = {
_id: idField.type === 'number' ? Number : String,
};
schemaFields = schemaFields.filter((field) => !(fieldAffectsData(field) && field.name === 'id'));
}
schemaFields = schemaFields.filter(
(field) => !(fieldAffectsData(field) && field.name === 'id'),
)
}
}

const schema = new Schema(fields, options);
const schema = new Schema(fields, options)

schemaFields.forEach((field) => {
if (!fieldIsPresentationalOnly(field)) {
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[field.type];
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[field.type]

if (addFieldSchema) {
addFieldSchema(field, schema, config, buildSchemaOptions);
addFieldSchema(field, schema, config, buildSchemaOptions)
}
}
});
})

return schema;
};
return schema
}

const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
number: (field: NumberField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: field.hasMany ? [Number] : Number };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
text: (field: TextField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
email: (field: EmailField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
textarea: (field: TextareaField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
richText: (field: RichTextField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Schema.Types.Mixed };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
code: (field: CodeField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
json: (field: JSONField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Schema.Types.Mixed };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
point: (field: PointField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema: SchemaTypeOptions<unknown> = {
|
||||
type: {
|
||||
type: String,
|
||||
enum: ['Point'],
|
||||
},
|
||||
coordinates: {
|
||||
type: [Number],
|
||||
required: false,
|
||||
default: field.defaultValue || undefined,
|
||||
},
|
||||
};
|
||||
if (buildSchemaOptions.disableUnique && field.unique && field.localized) {
|
||||
baseSchema.coordinates.sparse = true;
|
||||
array: (
|
||||
field: ArrayField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
) => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
default: undefined,
|
||||
type: [
|
||||
buildSchema(config, field.fields, {
|
||||
allowIDField: true,
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
}),
|
||||
],
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
})
|
||||
},
|
||||
blocks: (
|
||||
field: BlockField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const fieldSchema = {
|
||||
default: undefined,
|
||||
type: [new Schema({}, { _id: false, discriminatorKey: 'blockType' })],
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, fieldSchema, config.localization),
|
||||
})
|
||||
|
||||
field.blocks.forEach((blockItem: Block) => {
|
||||
const blockSchema = new Schema({}, { _id: false, id: false })
|
||||
|
||||
blockItem.fields.forEach((blockField) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[blockField.type]
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(blockField, blockSchema, config, buildSchemaOptions)
|
||||
}
|
||||
})
|
||||
|
||||
if (field.localized && config.localization) {
|
||||
config.localization.localeCodes.forEach((localeCode) => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore Possible incorrect typing in mongoose types, this works
|
||||
schema.path(`${field.name}.${localeCode}`).discriminator(blockItem.slug, blockSchema)
|
||||
})
|
||||
} else {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore Possible incorrect typing in mongoose types, this works
|
||||
schema.path(field.name).discriminator(blockItem.slug, blockSchema)
|
||||
}
|
||||
})
|
||||
},
|
||||
checkbox: (
|
||||
field: CheckboxField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Boolean }
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
code: (
|
||||
field: CodeField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String }
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
collapsible: (
|
||||
field: CollapsibleField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
field.fields.forEach((subField: Field) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[subField.type]
|
||||
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(subField, schema, config, buildSchemaOptions)
|
||||
}
|
||||
})
|
||||
},
|
||||
date: (
|
||||
field: DateField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Date }
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
email: (
|
||||
field: EmailField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String }
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
group: (
|
||||
field: GroupField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const formattedBaseSchema = formatBaseSchema(field, buildSchemaOptions)
|
||||
|
||||
// carry indexSortableFields through to versions if drafts enabled
|
||||
const indexSortableFields =
|
||||
buildSchemaOptions.indexSortableFields &&
|
||||
field.name === 'version' &&
|
||||
buildSchemaOptions.draftsEnabled
|
||||
|
||||
const baseSchema = {
|
||||
...formattedBaseSchema,
|
||||
type: buildSchema(config, field.fields, {
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
indexSortableFields,
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
json: (
|
||||
field: JSONField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Schema.Types.Mixed }
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
number: (
|
||||
field: NumberField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: field.hasMany ? [Number] : Number,
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
point: (
|
||||
field: PointField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema: SchemaTypeOptions<unknown> = {
|
||||
coordinates: {
|
||||
default: field.defaultValue || undefined,
|
||||
required: false,
|
||||
type: [Number],
|
||||
},
|
||||
type: {
|
||||
enum: ['Point'],
|
||||
type: String,
|
||||
},
|
||||
}
|
||||
if (buildSchemaOptions.disableUnique && field.unique && field.localized) {
|
||||
baseSchema.coordinates.sparse = true
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
|
||||
if (field.index === true || field.index === undefined) {
|
||||
const indexOptions: IndexOptions = {};
|
||||
const indexOptions: IndexOptions = {}
|
||||
if (!buildSchemaOptions.disableUnique && field.unique) {
|
||||
indexOptions.sparse = true;
|
||||
indexOptions.unique = true;
|
||||
indexOptions.sparse = true
|
||||
indexOptions.unique = true
|
||||
}
|
||||
if (field.localized && config.localization) {
|
||||
config.localization.locales.forEach((locale) => {
|
||||
schema.index({ [`${field.name}.${locale}`]: '2dsphere' }, indexOptions);
|
||||
});
|
||||
schema.index({ [`${field.name}.${locale}`]: '2dsphere' }, indexOptions)
|
||||
})
|
||||
} else {
|
||||
schema.index({ [field.name]: '2dsphere' }, indexOptions);
|
||||
schema.index({ [field.name]: '2dsphere' }, indexOptions)
|
||||
}
|
||||
}
|
||||
},
|
||||
radio: (field: RadioField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
radio: (
|
||||
field: RadioField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: String,
|
||||
enum: field.options.map((option) => {
|
||||
if (typeof option === 'object') return option.value;
|
||||
return option;
|
||||
if (typeof option === 'object') return option.value
|
||||
return option
|
||||
}),
|
||||
};
|
||||
type: String,
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
})
|
||||
},
|
||||
checkbox: (field: CheckboxField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Boolean };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
date: (field: DateField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Date };
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
upload: (field: UploadField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
};
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
},
|
||||
relationship: (field: RelationshipField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions) => {
|
||||
const hasManyRelations = Array.isArray(field.relationTo);
|
||||
let schemaToReturn: { [key: string]: any } = {};
|
||||
relationship: (
|
||||
field: RelationshipField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
) => {
|
||||
const hasManyRelations = Array.isArray(field.relationTo)
|
||||
let schemaToReturn: { [key: string]: any } = {}
|
||||
|
||||
if (field.localized && config.localization) {
|
||||
schemaToReturn = {
|
||||
localized: true,
|
||||
type: config.localization.localeCodes.reduce((locales, locale) => {
|
||||
let localeSchema: { [key: string]: any } = {};
|
||||
let localeSchema: { [key: string]: any } = {}
|
||||
|
||||
if (hasManyRelations) {
|
||||
localeSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: Schema.Types.Mixed,
|
||||
_id: false,
|
||||
relationTo: { enum: field.relationTo, type: String },
|
||||
type: Schema.Types.Mixed,
|
||||
value: {
|
||||
type: Schema.Types.Mixed,
|
||||
refPath: `${field.name}.${locale}.relationTo`,
|
||||
type: Schema.Types.Mixed,
|
||||
},
|
||||
relationTo: { type: String, enum: field.relationTo },
|
||||
};
|
||||
}
|
||||
} else {
|
||||
localeSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
};
|
||||
type: Schema.Types.Mixed,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...locales,
|
||||
[locale]: field.hasMany ? { type: [localeSchema], default: undefined } : localeSchema,
|
||||
};
|
||||
[locale]: field.hasMany ? { default: undefined, type: [localeSchema] } : localeSchema,
|
||||
}
|
||||
}, {}),
|
||||
localized: true,
|
||||
};
|
||||
}
|
||||
} else if (hasManyRelations) {
|
||||
schemaToReturn = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: Schema.Types.Mixed,
|
||||
_id: false,
|
||||
relationTo: { enum: field.relationTo, type: String },
|
||||
type: Schema.Types.Mixed,
|
||||
value: {
|
||||
type: Schema.Types.Mixed,
|
||||
refPath: `${field.name}.relationTo`,
|
||||
type: Schema.Types.Mixed,
|
||||
},
|
||||
relationTo: { type: String, enum: field.relationTo },
|
||||
};
|
||||
}
|
||||
|
||||
if (field.hasMany) {
|
||||
schemaToReturn = {
|
||||
type: [schemaToReturn],
|
||||
default: undefined,
|
||||
};
|
||||
type: [schemaToReturn],
|
||||
}
|
||||
}
|
||||
} else {
|
||||
schemaToReturn = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
};
|
||||
type: Schema.Types.Mixed,
|
||||
}
|
||||
|
||||
if (field.hasMany) {
|
||||
schemaToReturn = {
|
||||
type: [schemaToReturn],
|
||||
default: undefined,
|
||||
};
|
||||
type: [schemaToReturn],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: schemaToReturn,
|
||||
});
|
||||
})
|
||||
},
|
||||
row: (field: RowField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
field.fields.forEach((subField: Field) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[subField.type];
|
||||
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(subField, schema, config, buildSchemaOptions);
|
||||
}
|
||||
});
|
||||
},
|
||||
collapsible: (field: CollapsibleField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
field.fields.forEach((subField: Field) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[subField.type];
|
||||
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(subField, schema, config, buildSchemaOptions);
|
||||
}
|
||||
});
|
||||
},
|
||||
tabs: (field: TabsField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
field.tabs.forEach((tab) => {
|
||||
if (tabHasName(tab)) {
|
||||
const baseSchema = {
|
||||
type: buildSchema(
|
||||
config,
|
||||
tab.fields,
|
||||
{
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
},
|
||||
),
|
||||
};
|
||||
|
||||
schema.add({
|
||||
[tab.name]: localizeSchema(tab, baseSchema, config.localization),
|
||||
});
|
||||
} else {
|
||||
(tab as UnnamedTab).fields.forEach((subField: Field) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[subField.type];
|
||||
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(subField, schema, config, buildSchemaOptions);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
array: (field: ArrayField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions) => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
default: undefined,
|
||||
type: [buildSchema(
|
||||
config,
|
||||
field.fields,
|
||||
{
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
allowIDField: true,
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
},
|
||||
)],
|
||||
};
|
||||
richText: (
|
||||
field: RichTextField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: Schema.Types.Mixed }
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
})
|
||||
},
|
||||
group: (field: GroupField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const formattedBaseSchema = formatBaseSchema(field, buildSchemaOptions);
|
||||
row: (
|
||||
field: RowField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
field.fields.forEach((subField: Field) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[subField.type]
|
||||
|
||||
// carry indexSortableFields through to versions if drafts enabled
|
||||
const indexSortableFields = (buildSchemaOptions.indexSortableFields && field.name === 'version' && buildSchemaOptions.draftsEnabled);
|
||||
|
||||
const baseSchema = {
|
||||
...formattedBaseSchema,
|
||||
type: buildSchema(
|
||||
config,
|
||||
field.fields,
|
||||
{
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
indexSortableFields,
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
},
|
||||
),
|
||||
};
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
});
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(subField, schema, config, buildSchemaOptions)
|
||||
}
|
||||
})
|
||||
},
|
||||
select: (field: SelectField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
select: (
|
||||
field: SelectField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: String,
|
||||
enum: field.options.map((option) => {
|
||||
if (typeof option === 'object') return option.value;
|
||||
return option;
|
||||
if (typeof option === 'object') return option.value
|
||||
return option
|
||||
}),
|
||||
};
|
||||
type: String,
|
||||
}
|
||||
|
||||
if (buildSchemaOptions.draftsEnabled || !field.required) {
|
||||
baseSchema.enum.push(null);
|
||||
baseSchema.enum.push(null)
|
||||
}
|
||||
|
||||
schema.add({
|
||||
@@ -431,41 +505,82 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
field.hasMany ? [baseSchema] : baseSchema,
|
||||
config.localization,
|
||||
),
|
||||
});
|
||||
})
|
||||
},
|
||||
blocks: (field: BlockField, schema: Schema, config: SanitizedConfig, buildSchemaOptions: BuildSchemaOptions): void => {
|
||||
const fieldSchema = {
|
||||
default: undefined,
|
||||
type: [new Schema({}, { _id: false, discriminatorKey: 'blockType' })],
|
||||
};
|
||||
tabs: (
|
||||
field: TabsField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
field.tabs.forEach((tab) => {
|
||||
if (tabHasName(tab)) {
|
||||
const baseSchema = {
|
||||
type: buildSchema(config, tab.fields, {
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[tab.name]: localizeSchema(tab, baseSchema, config.localization),
|
||||
})
|
||||
} else {
|
||||
tab.fields.forEach((subField: Field) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[subField.type]
|
||||
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(subField, schema, config, buildSchemaOptions)
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
},
|
||||
text: (
|
||||
field: TextField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String }
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, fieldSchema, config.localization),
|
||||
});
|
||||
|
||||
field.blocks.forEach((blockItem: Block) => {
|
||||
const blockSchema = new Schema({}, { _id: false, id: false });
|
||||
|
||||
blockItem.fields.forEach((blockField) => {
|
||||
const addFieldSchema: FieldSchemaGenerator = fieldToSchemaMap[blockField.type];
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(blockField, blockSchema, config, buildSchemaOptions);
|
||||
}
|
||||
});
|
||||
|
||||
if (field.localized && config.localization) {
|
||||
config.localization.localeCodes.forEach((localeCode) => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore Possible incorrect typing in mongoose types, this works
|
||||
schema.path(`${field.name}.${localeCode}`).discriminator(blockItem.slug, blockSchema);
|
||||
});
|
||||
} else {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore Possible incorrect typing in mongoose types, this works
|
||||
schema.path(field.name).discriminator(blockItem.slug, blockSchema);
|
||||
}
|
||||
});
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
};
|
||||
textarea: (
|
||||
field: TextareaField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = { ...formatBaseSchema(field, buildSchemaOptions), type: String }
|
||||
|
||||
export default buildSchema;
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
upload: (
|
||||
field: UploadField,
|
||||
schema: Schema,
|
||||
config: SanitizedConfig,
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
ref: field.relationTo,
|
||||
type: Schema.Types.Mixed,
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, config.localization),
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
export default buildSchema
|
||||
|
||||
@@ -1,24 +1,24 @@
import { Where } from 'payload/types';
import { Field } from 'payload/dist/fields/config/types';
import { Payload } from 'payload';
import { parseParams } from './parseParams';
import type { Payload } from 'payload'
import type { Field, Where } from 'payload/types'

import { parseParams } from './parseParams'

export async function buildAndOrConditions({
where,
collectionSlug,
globalSlug,
payload,
locale,
fields,
globalSlug,
locale,
payload,
where,
}: {
where: Where[],
collectionSlug?: string,
globalSlug?: string,
payload: Payload,
locale?: string,
fields: Field[],
collectionSlug?: string
fields: Field[]
globalSlug?: string
locale?: string
payload: Payload
where: Where[]
}): Promise<Record<string, unknown>[]> {
const completedConditions = [];
const completedConditions = []
// Loop over all AND / OR operations and add them to the AND / OR query param
// Operations should come through as an array
// eslint-disable-next-line no-restricted-syntax
@@ -27,17 +27,17 @@ export async function buildAndOrConditions({
if (typeof condition === 'object') {
// eslint-disable-next-line no-await-in-loop
const result = await parseParams({
where: condition,
collectionSlug,
globalSlug,
payload,
locale,
fields,
});
globalSlug,
locale,
payload,
where: condition,
})
if (Object.keys(result).length > 0) {
completedConditions.push(result);
completedConditions.push(result)
}
}
}
return completedConditions;
return completedConditions
}

@@ -1,8 +1,9 @@
import { Where } from 'payload/dist/types';
import { Field } from 'payload/dist/fields/config/types';
import QueryError from 'payload/dist/errors/QueryError';
import { Payload } from 'payload';
import { parseParams } from './parseParams';
import type { Payload } from 'payload'
import type { Field, Where } from 'payload/types'

import { QueryError } from 'payload/errors'

import { parseParams } from './parseParams'

type GetBuildQueryPluginArgs = {
collectionSlug?: string
@@ -10,50 +11,52 @@ type GetBuildQueryPluginArgs = {
}

export type BuildQueryArgs = {
payload: Payload
locale?: string
where: Where
globalSlug?: string
locale?: string
payload: Payload
where: Where
}

// This plugin asynchronously builds a list of Mongoose query constraints
// which can then be used in subsequent Mongoose queries.
const getBuildQueryPlugin = ({
collectionSlug,
versionsFields,
}: GetBuildQueryPluginArgs = {}) => {
const getBuildQueryPlugin = ({ collectionSlug, versionsFields }: GetBuildQueryPluginArgs = {}) => {
return function buildQueryPlugin(schema) {
const modifiedSchema = schema;
async function buildQuery({ payload, locale, where, globalSlug }: BuildQueryArgs): Promise<Record<string, unknown>> {
let fields = versionsFields;
const modifiedSchema = schema
async function buildQuery({
globalSlug,
locale,
payload,
where,
}: BuildQueryArgs): Promise<Record<string, unknown>> {
let fields = versionsFields
if (!fields) {
if (globalSlug) {
const globalConfig = payload.globals.config.find(({ slug }) => slug === globalSlug);
fields = globalConfig.fields;
const globalConfig = payload.globals.config.find(({ slug }) => slug === globalSlug)
fields = globalConfig.fields
}
if (collectionSlug) {
const collectionConfig = payload.collections[collectionSlug].config;
fields = collectionConfig.fields;
const collectionConfig = payload.collections[collectionSlug].config
fields = collectionConfig.fields
}
}
const errors = [];
const errors = []
const result = await parseParams({
collectionSlug,
fields,
globalSlug,
payload,
locale,
payload,
where,
});
})

if (errors.length > 0) {
throw new QueryError(errors);
throw new QueryError(errors)
}

return result;
return result
}
modifiedSchema.statics.buildQuery = buildQuery;
};
};
modifiedSchema.statics.buildQuery = buildQuery
}
}

export default getBuildQueryPlugin;
export default getBuildQueryPlugin

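For reference, a minimal sketch (not part of this commit) of how the buildQuery static registered above might be called from adapter code — the Posts model and the where clause here are hypothetical:

const query = await Posts.buildQuery({
  payload,
  locale: 'en',
  where: { title: { equals: 'Hello world' } },
})
const docs = await Posts.find(query)
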
@@ -1,238 +1,247 @@
import mongoose from 'mongoose';
import objectID from 'bson-objectid';
import { getLocalizedPaths } from 'payload/dist/database/getLocalizedPaths';
import { Field, fieldAffectsData } from 'payload/dist/fields/config/types';
import { PathToQuery } from 'payload/dist/database/queryValidation/types';
import { validOperators } from 'payload/dist/types/constants';
import { Payload } from 'payload';
import { Operator } from 'payload/types';
import { operatorMap } from './operatorMap';
import { sanitizeQueryValue } from './sanitizeQueryValue';
import { MongooseAdapter } from '..';
import type { Payload } from 'payload'
import type { PathToQuery } from 'payload/database'
import type { Field } from 'payload/types'
import type { Operator } from 'payload/types'

import objectID from 'bson-objectid'
import mongoose from 'mongoose'
import { getLocalizedPaths } from 'payload/database'
import { fieldAffectsData } from 'payload/types'
import { validOperators } from 'payload/types'

import type { MongooseAdapter } from '..'

import { operatorMap } from './operatorMap'
import { sanitizeQueryValue } from './sanitizeQueryValue'

type SearchParam = {
path?: string,
value: unknown,
path?: string
value: unknown
}

const subQueryOptions = {
limit: 50,
lean: true,
};
limit: 50,
}

/**
|
||||
* Convert the Payload key / value / operator into a MongoDB query
|
||||
*/
|
||||
export async function buildSearchParam({
|
||||
fields,
|
||||
incomingPath,
|
||||
val,
|
||||
operator,
|
||||
collectionSlug,
|
||||
fields,
|
||||
globalSlug,
|
||||
payload,
|
||||
incomingPath,
|
||||
locale,
|
||||
operator,
|
||||
payload,
|
||||
val,
|
||||
}: {
|
||||
fields: Field[],
|
||||
incomingPath: string,
|
||||
val: unknown,
|
||||
operator: string
|
||||
collectionSlug?: string,
|
||||
globalSlug?: string,
|
||||
payload: Payload,
|
||||
collectionSlug?: string
|
||||
fields: Field[]
|
||||
globalSlug?: string
|
||||
incomingPath: string
|
||||
locale?: string
|
||||
operator: string
|
||||
payload: Payload
|
||||
val: unknown
|
||||
}): Promise<SearchParam> {
|
||||
// Replace GraphQL nested field double underscore formatting
|
||||
let sanitizedPath = incomingPath.replace(/__/gi, '.');
|
||||
if (sanitizedPath === 'id') sanitizedPath = '_id';
|
||||
let sanitizedPath = incomingPath.replace(/__/g, '.')
|
||||
if (sanitizedPath === 'id') sanitizedPath = '_id'
|
||||
|
||||
let paths: PathToQuery[] = [];
|
||||
let paths: PathToQuery[] = []
|
||||
|
||||
let hasCustomID = false;
|
||||
let hasCustomID = false
|
||||
|
||||
if (sanitizedPath === '_id') {
|
||||
const customIDfield = payload.collections[collectionSlug]?.config.fields.find((field) => fieldAffectsData(field) && field.name === 'id');
|
||||
const customIDfield = payload.collections[collectionSlug]?.config.fields.find(
|
||||
(field) => fieldAffectsData(field) && field.name === 'id',
|
||||
)
|
||||
|
||||
let idFieldType: 'text' | 'number' = 'text';
|
||||
let idFieldType: 'number' | 'text' = 'text'
|
||||
|
||||
if (customIDfield) {
|
||||
if (customIDfield?.type === 'text' || customIDfield?.type === 'number') {
|
||||
idFieldType = customIDfield.type;
|
||||
idFieldType = customIDfield.type
|
||||
}
|
||||
|
||||
hasCustomID = true;
|
||||
hasCustomID = true
|
||||
}
|
||||
|
||||
paths.push({
|
||||
path: '_id',
|
||||
collectionSlug,
|
||||
complete: true,
|
||||
field: {
|
||||
name: 'id',
|
||||
type: idFieldType,
|
||||
} as Field,
|
||||
complete: true,
|
||||
collectionSlug,
|
||||
});
|
||||
path: '_id',
|
||||
})
|
||||
} else {
|
||||
paths = await getLocalizedPaths({
|
||||
payload,
|
||||
locale,
|
||||
collectionSlug,
|
||||
globalSlug,
|
||||
fields,
|
||||
globalSlug,
|
||||
incomingPath: sanitizedPath,
|
||||
});
|
||||
locale,
|
||||
payload,
|
||||
})
|
||||
}
|
||||
|
||||
const [{
|
||||
path,
|
||||
field,
|
||||
}] = paths;
|
||||
const [{ field, path }] = paths
|
||||
|
||||
if (path) {
|
||||
const formattedValue = sanitizeQueryValue({
|
||||
field,
|
||||
path,
|
||||
operator,
|
||||
val,
|
||||
hasCustomID,
|
||||
});
|
||||
operator,
|
||||
path,
|
||||
val,
|
||||
})
|
||||
|
||||
// If there are multiple collections to search through,
|
||||
// Recursively build up a list of query constraints
|
||||
if (paths.length > 1) {
|
||||
// Remove top collection and reverse array
|
||||
// to work backwards from top
|
||||
const pathsToQuery = paths.slice(1)
|
||||
.reverse();
|
||||
const pathsToQuery = paths.slice(1).reverse()
|
||||
|
||||
const initialRelationshipQuery = {
|
||||
value: {},
|
||||
} as SearchParam;
|
||||
} as SearchParam
|
||||
|
||||
const relationshipQuery = await pathsToQuery.reduce(async (priorQuery, {
|
||||
path: subPath,
|
||||
collectionSlug: slug,
|
||||
}, i) => {
|
||||
const priorQueryResult = await priorQuery;
|
||||
const relationshipQuery = await pathsToQuery.reduce(
|
||||
async (priorQuery, { collectionSlug: slug, path: subPath }, i) => {
|
||||
const priorQueryResult = await priorQuery
|
||||
|
||||
const SubModel = (payload.db as MongooseAdapter).collections[slug];
|
||||
const SubModel = (payload.db as MongooseAdapter).collections[slug]
|
||||
|
||||
// On the "deepest" collection,
|
||||
// Search on the value passed through the query
|
||||
if (i === 0) {
|
||||
const subQuery = await SubModel.buildQuery({
|
||||
where: {
|
||||
[subPath]: {
|
||||
[operator]: val,
|
||||
// On the "deepest" collection,
|
||||
// Search on the value passed through the query
|
||||
if (i === 0) {
|
||||
const subQuery = await SubModel.buildQuery({
|
||||
locale,
|
||||
payload,
|
||||
where: {
|
||||
[subPath]: {
|
||||
[operator]: val,
|
||||
},
|
||||
},
|
||||
},
|
||||
payload,
|
||||
locale,
|
||||
});
|
||||
})
|
||||
|
||||
const result = await SubModel.find(subQuery, subQueryOptions);
|
||||
const result = await SubModel.find(subQuery, subQueryOptions)
|
||||
|
||||
const $in: unknown[] = [];
|
||||
const $in: unknown[] = []
|
||||
|
||||
result.forEach((doc) => {
|
||||
const stringID = doc._id.toString();
|
||||
$in.push(stringID);
|
||||
result.forEach((doc) => {
|
||||
const stringID = doc._id.toString()
|
||||
$in.push(stringID)
|
||||
|
||||
if (mongoose.Types.ObjectId.isValid(stringID)) {
|
||||
$in.push(doc._id);
|
||||
if (mongoose.Types.ObjectId.isValid(stringID)) {
|
||||
$in.push(doc._id)
|
||||
}
|
||||
})
|
||||
|
||||
if (pathsToQuery.length === 1) {
|
||||
return {
|
||||
path,
|
||||
value: { $in },
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (pathsToQuery.length === 1) {
|
||||
const nextSubPath = pathsToQuery[i + 1].path
|
||||
|
||||
return {
|
||||
value: { [nextSubPath]: { $in } },
|
||||
}
|
||||
}
|
||||
|
||||
const subQuery = priorQueryResult.value
|
||||
const result = await SubModel.find(subQuery, subQueryOptions)
|
||||
|
||||
const $in = result.map((doc) => doc._id.toString())
|
||||
|
||||
// If it is the last recursion
|
||||
// then pass through the search param
|
||||
if (i + 1 === pathsToQuery.length) {
|
||||
return {
|
||||
path,
|
||||
value: { $in },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const nextSubPath = pathsToQuery[i + 1].path;
|
||||
|
||||
return {
|
||||
value: { [nextSubPath]: { $in } },
|
||||
};
|
||||
}
|
||||
value: {
|
||||
_id: { $in },
|
||||
},
|
||||
}
|
||||
},
|
||||
Promise.resolve(initialRelationshipQuery),
|
||||
)
|
||||
|
||||
const subQuery = priorQueryResult.value;
|
||||
const result = await SubModel.find(subQuery, subQueryOptions);
|
||||
|
||||
const $in = result.map((doc) => doc._id.toString());
|
||||
|
||||
// If it is the last recursion
|
||||
// then pass through the search param
|
||||
if (i + 1 === pathsToQuery.length) {
|
||||
return {
|
||||
path,
|
||||
value: { $in },
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
value: {
|
||||
_id: { $in },
|
||||
},
|
||||
};
|
||||
}, Promise.resolve(initialRelationshipQuery));
|
||||
|
||||
return relationshipQuery;
|
||||
return relationshipQuery
|
||||
}
|
||||
|
||||
if (operator && validOperators.includes(operator as Operator)) {
|
||||
const operatorKey = operatorMap[operator];
|
||||
const operatorKey = operatorMap[operator]
|
||||
|
||||
if (field.type === 'relationship' || field.type === 'upload') {
|
||||
let hasNumberIDRelation;
|
||||
let hasNumberIDRelation
|
||||
|
||||
const result = {
|
||||
value: {
|
||||
$or: [
|
||||
{ [path]: { [operatorKey]: formattedValue } },
|
||||
],
|
||||
$or: [{ [path]: { [operatorKey]: formattedValue } }],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof formattedValue === 'string') {
|
||||
if (mongoose.Types.ObjectId.isValid(formattedValue)) {
|
||||
result.value.$or.push({ [path]: { [operatorKey]: objectID(formattedValue) } });
|
||||
result.value.$or.push({ [path]: { [operatorKey]: objectID(formattedValue) } })
|
||||
} else {
|
||||
(Array.isArray(field.relationTo) ? field.relationTo : [field.relationTo]).forEach((relationTo) => {
|
||||
const isRelatedToCustomNumberID = payload.collections[relationTo]?.config?.fields.find((relatedField) => {
|
||||
return fieldAffectsData(relatedField) && relatedField.name === 'id' && relatedField.type === 'number';
|
||||
});
|
||||
;(Array.isArray(field.relationTo) ? field.relationTo : [field.relationTo]).forEach(
|
||||
(relationTo) => {
|
||||
const isRelatedToCustomNumberID = payload.collections[
|
||||
relationTo
|
||||
]?.config?.fields.find((relatedField) => {
|
||||
return (
|
||||
fieldAffectsData(relatedField) &&
|
||||
relatedField.name === 'id' &&
|
||||
relatedField.type === 'number'
|
||||
)
|
||||
})
|
||||
|
||||
if (isRelatedToCustomNumberID) {
|
||||
if (isRelatedToCustomNumberID.type === 'number') hasNumberIDRelation = true;
|
||||
}
|
||||
});
|
||||
if (isRelatedToCustomNumberID) {
|
||||
if (isRelatedToCustomNumberID.type === 'number') hasNumberIDRelation = true
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
if (hasNumberIDRelation) result.value.$or.push({ [path]: { [operatorKey]: parseFloat(formattedValue) } });
|
||||
if (hasNumberIDRelation)
|
||||
result.value.$or.push({ [path]: { [operatorKey]: parseFloat(formattedValue) } })
|
||||
}
|
||||
}
|
||||
|
||||
if (result.value.$or.length > 1) {
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
if (operator === 'like' && typeof formattedValue === 'string') {
|
||||
const words = formattedValue.split(' ');
|
||||
const words = formattedValue.split(' ')
|
||||
|
||||
const result = {
|
||||
value: {
|
||||
$and: words.map((word) => ({
|
||||
[path]: {
|
||||
$regex: word.replace(/[\\^$*+?\\.()|[\]{}]/g, '\\$&'),
|
||||
$options: 'i',
|
||||
$regex: word.replace(/[\\^$*+?.()|[\]{}]/g, '\\$&'),
|
||||
},
|
||||
})),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
|
||||
// Some operators like 'near' need to define a full query
|
||||
@@ -241,14 +250,14 @@ export async function buildSearchParam({
|
||||
return {
|
||||
path,
|
||||
value: formattedValue,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
path,
|
||||
value: { [operatorKey]: formattedValue },
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
return undefined
|
||||
}
|
||||
|
||||
@@ -1,50 +1,57 @@
import { PaginateOptions } from 'mongoose';
import { SanitizedConfig } from 'payload/dist/config/types';
import { Field } from 'payload/dist/fields/config/types';
import { getLocalizedSortProperty } from './getLocalizedSortProperty';
import type { PaginateOptions } from 'mongoose'
import type { SanitizedConfig } from 'payload/config'
import type { Field } from 'payload/types'

import { getLocalizedSortProperty } from './getLocalizedSortProperty'

type Args = {
sort: string
config: SanitizedConfig
fields: Field[]
timestamps: boolean
locale: string
sort: string
timestamps: boolean
}

export type SortArgs = {
property: string
direction: SortDirection
property: string
}[]

export type SortDirection = 'asc' | 'desc';
export type SortDirection = 'asc' | 'desc'

export const buildSortParam = ({ sort, config, fields, timestamps, locale }: Args): PaginateOptions['sort'] => {
let sortProperty: string;
let sortDirection: SortDirection = 'desc';
export const buildSortParam = ({
config,
fields,
locale,
sort,
timestamps,
}: Args): PaginateOptions['sort'] => {
let sortProperty: string
let sortDirection: SortDirection = 'desc'

if (!sort) {
if (timestamps) {
sortProperty = 'createdAt';
sortProperty = 'createdAt'
} else {
sortProperty = '_id';
sortProperty = '_id'
}
} else if (sort.indexOf('-') === 0) {
sortProperty = sort.substring(1);
sortProperty = sort.substring(1)
} else {
sortProperty = sort;
sortDirection = 'asc';
sortProperty = sort
sortDirection = 'asc'
}

if (sortProperty === 'id') {
sortProperty = '_id';
sortProperty = '_id'
} else {
sortProperty = getLocalizedSortProperty({
segments: sortProperty.split('.'),
config,
fields,
locale,
});
segments: sortProperty.split('.'),
})
}

return { [sortProperty]: sortDirection };
};
return { [sortProperty]: sortDirection }
}

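As an illustration only (assuming a localized title field and locale 'en'), the sort strings handled above resolve to Mongoose sort objects along these lines:

buildSortParam({ config, fields, locale: 'en', sort: '-createdAt', timestamps: true })
// => { createdAt: 'desc' }
buildSortParam({ config, fields, locale: 'en', sort: 'title', timestamps: true })
// => { 'title.en': 'asc' }
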
@@ -1,12 +1,12 @@
|
||||
import { sanitizeConfig } from 'payload/dist/config/sanitize';
|
||||
import { Config } from 'payload/dist/config/types';
|
||||
import { getLocalizedSortProperty } from './getLocalizedSortProperty';
|
||||
import { sanitizeConfig } from 'payload/config'
|
||||
import { Config } from 'payload/config'
|
||||
import { getLocalizedSortProperty } from './getLocalizedSortProperty'
|
||||
|
||||
const config = {
|
||||
localization: {
|
||||
locales: ['en', 'es'],
|
||||
},
|
||||
} as Config;
|
||||
} as Config
|
||||
|
||||
describe('get localized sort property', () => {
|
||||
it('passes through a non-localized sort property', () => {
|
||||
@@ -20,10 +20,10 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('title');
|
||||
});
|
||||
expect(result).toStrictEqual('title')
|
||||
})
|
||||
|
||||
it('properly localizes an un-localized sort property', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
@@ -37,10 +37,10 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('title.en');
|
||||
});
|
||||
expect(result).toStrictEqual('title.en')
|
||||
})
|
||||
|
||||
it('keeps specifically asked-for localized sort properties', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
@@ -54,10 +54,10 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('title.es');
|
||||
});
|
||||
expect(result).toStrictEqual('title.es')
|
||||
})
|
||||
|
||||
it('properly localizes nested sort properties', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
@@ -77,10 +77,10 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('group.title.en');
|
||||
});
|
||||
expect(result).toStrictEqual('group.title.en')
|
||||
})
|
||||
|
||||
it('keeps requested locale with nested sort properties', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
@@ -100,10 +100,10 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('group.title.es');
|
||||
});
|
||||
expect(result).toStrictEqual('group.title.es')
|
||||
})
|
||||
|
||||
it('properly localizes field within row', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
@@ -122,10 +122,10 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('title.en');
|
||||
});
|
||||
expect(result).toStrictEqual('title.en')
|
||||
})
|
||||
|
||||
it('properly localizes field within named tab', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
@@ -149,10 +149,10 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('tab.title.en');
|
||||
});
|
||||
expect(result).toStrictEqual('tab.title.en')
|
||||
})
|
||||
|
||||
it('properly localizes field within unnamed tab', () => {
|
||||
const result = getLocalizedSortProperty({
|
||||
@@ -176,8 +176,8 @@ describe('get localized sort property', () => {
|
||||
},
|
||||
],
|
||||
locale: 'en',
|
||||
});
|
||||
})
|
||||
|
||||
expect(result).toStrictEqual('title.en');
|
||||
});
|
||||
});
|
||||
expect(result).toStrictEqual('title.en')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,89 +1,103 @@
|
||||
import { SanitizedConfig } from 'payload/dist/config/types';
|
||||
import { Field, fieldAffectsData, fieldIsPresentationalOnly } from 'payload/dist/fields/config/types';
|
||||
import flattenTopLevelFields from 'payload/dist/utilities/flattenTopLevelFields';
|
||||
import type { SanitizedConfig } from 'payload/config'
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import { fieldAffectsData, fieldIsPresentationalOnly } from 'payload/types'
|
||||
import { flattenTopLevelFields } from 'payload/utilities'
|
||||
|
||||
type Args = {
|
||||
segments: string[]
|
||||
config: SanitizedConfig
|
||||
fields: Field[]
|
||||
locale: string
|
||||
result?: string
|
||||
segments: string[]
|
||||
}
|
||||
|
||||
export const getLocalizedSortProperty = ({
|
||||
segments: incomingSegments,
|
||||
config,
|
||||
fields: incomingFields,
|
||||
locale,
|
||||
result: incomingResult,
|
||||
segments: incomingSegments,
|
||||
}: Args): string => {
|
||||
// If localization is not enabled, accept exactly
|
||||
// what is sent in
|
||||
if (!config.localization) {
|
||||
return incomingSegments.join('.');
|
||||
return incomingSegments.join('.')
|
||||
}
|
||||
|
||||
// Flatten incoming fields (row, etc)
|
||||
const fields = flattenTopLevelFields(incomingFields);
|
||||
const fields = flattenTopLevelFields(incomingFields)
|
||||
|
||||
const segments = [...incomingSegments];
|
||||
const segments = [...incomingSegments]
|
||||
|
||||
// Retrieve first segment, and remove from segments
|
||||
const firstSegment = segments.shift();
|
||||
const firstSegment = segments.shift()
|
||||
|
||||
// Attempt to find a matched field
|
||||
const matchedField = fields.find((field) => fieldAffectsData(field) && field.name === firstSegment);
|
||||
const matchedField = fields.find(
|
||||
(field) => fieldAffectsData(field) && field.name === firstSegment,
|
||||
)
|
||||
|
||||
if (matchedField && !fieldIsPresentationalOnly(matchedField)) {
|
||||
let nextFields: Field[];
|
||||
const remainingSegments = [...segments];
|
||||
let localizedSegment = matchedField.name;
|
||||
let nextFields: Field[]
|
||||
const remainingSegments = [...segments]
|
||||
let localizedSegment = matchedField.name
|
||||
|
||||
if (matchedField.localized) {
|
||||
// Check to see if next segment is a locale
|
||||
if (segments.length > 0) {
|
||||
const nextSegmentIsLocale = config.localization.localeCodes.includes(remainingSegments[0]);
|
||||
const nextSegmentIsLocale = config.localization.localeCodes.includes(remainingSegments[0])
|
||||
|
||||
// If next segment is locale, remove it from remaining segments
|
||||
// and use it to localize the current segment
|
||||
if (nextSegmentIsLocale) {
|
||||
const nextSegment = remainingSegments.shift();
|
||||
localizedSegment = `${matchedField.name}.${nextSegment}`;
|
||||
const nextSegment = remainingSegments.shift()
|
||||
localizedSegment = `${matchedField.name}.${nextSegment}`
|
||||
}
|
||||
} else {
|
||||
// If no more segments, but field is localized, use default locale
|
||||
localizedSegment = `${matchedField.name}.${locale}`;
|
||||
localizedSegment = `${matchedField.name}.${locale}`
|
||||
}
|
||||
}
|
||||
|
||||
// If there are subfields, pass them through
|
||||
if (matchedField.type === 'tab' || matchedField.type === 'group' || matchedField.type === 'array') {
|
||||
nextFields = matchedField.fields;
|
||||
if (
|
||||
matchedField.type === 'tab' ||
|
||||
matchedField.type === 'group' ||
|
||||
matchedField.type === 'array'
|
||||
) {
|
||||
nextFields = matchedField.fields
|
||||
}
|
||||
|
||||
if (matchedField.type === 'blocks') {
|
||||
nextFields = matchedField.blocks.reduce((flattenedBlockFields, block) => {
|
||||
return [
|
||||
...flattenedBlockFields,
|
||||
...block.fields.filter((blockField) => (fieldAffectsData(blockField) && (blockField.name !== 'blockType' && blockField.name !== 'blockName')) || !fieldAffectsData(blockField)),
|
||||
];
|
||||
}, []);
|
||||
...block.fields.filter(
|
||||
(blockField) =>
|
||||
(fieldAffectsData(blockField) &&
|
||||
blockField.name !== 'blockType' &&
|
||||
blockField.name !== 'blockName') ||
|
||||
!fieldAffectsData(blockField),
|
||||
),
|
||||
]
|
||||
}, [])
|
||||
}
|
||||
|
||||
const result = incomingResult ? `${incomingResult}.${localizedSegment}` : localizedSegment;
|
||||
const result = incomingResult ? `${incomingResult}.${localizedSegment}` : localizedSegment
|
||||
|
||||
if (nextFields) {
|
||||
return getLocalizedSortProperty({
|
||||
segments: remainingSegments,
|
||||
config,
|
||||
fields: nextFields,
|
||||
locale,
|
||||
result,
|
||||
});
|
||||
segments: remainingSegments,
|
||||
})
|
||||
}
|
||||
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
|
||||
return incomingSegments.join('.');
|
||||
};
|
||||
return incomingSegments.join('.')
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
export const operatorMap = {
greater_than_equal: '$gte',
less_than_equal: '$lte',
less_than: '$lt',
greater_than: '$gt',
in: '$in',
all: '$all',
not_in: '$nin',
not_equals: '$ne',
exists: '$exists',
equals: '$eq',
near: '$near',
within: '$geoWithin',
exists: '$exists',
greater_than: '$gt',
greater_than_equal: '$gte',
in: '$in',
intersects: '$geoIntersects',
};
less_than: '$lt',
less_than_equal: '$lte',
near: '$near',
not_equals: '$ne',
not_in: '$nin',
within: '$geoWithin',
}

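For context, this map is what translates Payload query operators into their MongoDB equivalents; for example, a hypothetical where clause of { price: { greater_than_equal: 10 } } ends up as the Mongo filter:

{ price: { $gte: 10 } }
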
@@ -1,78 +1,80 @@
/* eslint-disable no-restricted-syntax */
/* eslint-disable no-await-in-loop */
import { FilterQuery } from 'mongoose';
import deepmerge from 'deepmerge';
import { Operator, Where } from 'payload/types';
import { combineMerge } from 'payload/dist/utilities/combineMerge';
import { Field } from 'payload/dist/fields/config/types';
import { validOperators } from 'payload/dist/types/constants';
import { Payload } from 'payload';
import { buildSearchParam } from './buildSearchParams';
import { buildAndOrConditions } from './buildAndOrConditions';
import type { FilterQuery } from 'mongoose'
import type { Payload } from 'payload'
import type { Operator, Where } from 'payload/types'
import type { Field } from 'payload/types'

import deepmerge from 'deepmerge'
import { validOperators } from 'payload/types'
import { combineMerge } from 'payload/utilities'

import { buildAndOrConditions } from './buildAndOrConditions'
import { buildSearchParam } from './buildSearchParams'

export async function parseParams({
where,
collectionSlug,
globalSlug,
payload,
locale,
fields,
globalSlug,
locale,
payload,
where,
}: {
where: Where,
collectionSlug?: string,
globalSlug?: string,
payload: Payload,
locale: string,
fields: Field[],
collectionSlug?: string
fields: Field[]
globalSlug?: string
locale: string
payload: Payload
where: Where
}): Promise<Record<string, unknown>> {
let result = {} as FilterQuery<any>;
let result = {} as FilterQuery<any>

if (typeof where === 'object') {
// We need to determine if the whereKey is an AND, OR, or a schema path
for (const relationOrPath of Object.keys(where)) {
const condition = where[relationOrPath];
let conditionOperator: '$and' | '$or';
const condition = where[relationOrPath]
let conditionOperator: '$and' | '$or'
if (relationOrPath.toLowerCase() === 'and') {
conditionOperator = '$and';
conditionOperator = '$and'
} else if (relationOrPath.toLowerCase() === 'or') {
conditionOperator = '$or';
conditionOperator = '$or'
}
if (Array.isArray(condition)) {
const builtConditions = await buildAndOrConditions({
collectionSlug,
fields,
globalSlug,
payload,
locale,
payload,
where: condition,
});
if (builtConditions.length > 0) result[conditionOperator] = builtConditions;
})
if (builtConditions.length > 0) result[conditionOperator] = builtConditions
} else {
// It's a path - and there can be multiple comparisons on a single path.
// For example - title like 'test' and title not equal to 'tester'
// So we need to loop on keys again here to handle each operator independently
const pathOperators = where[relationOrPath];
const pathOperators = where[relationOrPath]
if (typeof pathOperators === 'object') {
for (const operator of Object.keys(pathOperators)) {
if (validOperators.includes(operator as Operator)) {
const searchParam = await buildSearchParam({
collectionSlug,
globalSlug,
payload,
locale,
fields,
globalSlug,
incomingPath: relationOrPath,
val: pathOperators[operator],
locale,
operator,
});
payload,
val: pathOperators[operator],
})

if (searchParam?.value && searchParam?.path) {
result = {
...result,
[searchParam.path]: searchParam.value,
};
}
} else if (typeof searchParam?.value === 'object') {
result = deepmerge(result, searchParam.value, { arrayMerge: combineMerge });
result = deepmerge(result, searchParam.value, { arrayMerge: combineMerge })
}
}
}
@@ -81,5 +83,5 @@ export async function parseParams({
}
}

return result;
return result
}

@@ -1,127 +1,132 @@
|
||||
import mongoose from 'mongoose';
|
||||
import { Field, TabAsField } from 'payload/dist/fields/config/types';
|
||||
import { createArrayFromCommaDelineated } from 'payload/dist/utilities/createArrayFromCommaDelineated';
|
||||
import type { Field, TabAsField } from 'payload/types'
|
||||
|
||||
import mongoose from 'mongoose'
|
||||
|
||||
import { createArrayFromCommaDelineated } from '../utilities/createArrayFromCommaDelineated'
|
||||
|
||||
type SanitizeQueryValueArgs = {
|
||||
field: Field | TabAsField
|
||||
path: string
|
||||
operator: string,
|
||||
val: any
|
||||
hasCustomID: boolean
|
||||
operator: string
|
||||
path: string
|
||||
val: any
|
||||
}
|
||||
|
||||
export const sanitizeQueryValue = ({ field, path, operator, val, hasCustomID }: SanitizeQueryValueArgs): unknown => {
|
||||
let formattedValue = val;
|
||||
export const sanitizeQueryValue = ({
|
||||
field,
|
||||
hasCustomID,
|
||||
operator,
|
||||
path,
|
||||
val,
|
||||
}: SanitizeQueryValueArgs): unknown => {
|
||||
let formattedValue = val
|
||||
|
||||
// Disregard invalid _ids
|
||||
if (path === '_id' && typeof val === 'string' && val.split(',').length === 1) {
|
||||
if (!hasCustomID) {
|
||||
const isValid = mongoose.Types.ObjectId.isValid(val);
|
||||
const isValid = mongoose.Types.ObjectId.isValid(val)
|
||||
|
||||
if (!isValid) {
|
||||
return undefined;
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'number') {
|
||||
const parsedNumber = parseFloat(val);
|
||||
const parsedNumber = parseFloat(val)
|
||||
|
||||
if (Number.isNaN(parsedNumber)) {
|
||||
return undefined;
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Cast incoming values as proper searchable types
|
||||
if (field.type === 'checkbox' && typeof val === 'string') {
|
||||
if (val.toLowerCase() === 'true') formattedValue = true;
|
||||
if (val.toLowerCase() === 'false') formattedValue = false;
|
||||
if (val.toLowerCase() === 'true') formattedValue = true
|
||||
if (val.toLowerCase() === 'false') formattedValue = false
|
||||
}
|
||||
|
||||
if (['all', 'not_in', 'in'].includes(operator) && typeof formattedValue === 'string') {
|
||||
formattedValue = createArrayFromCommaDelineated(formattedValue);
|
||||
if (['all', 'in', 'not_in'].includes(operator) && typeof formattedValue === 'string') {
|
||||
formattedValue = createArrayFromCommaDelineated(formattedValue)
|
||||
|
||||
if (field.type === 'number') {
|
||||
formattedValue = formattedValue.map((arrayVal) => parseFloat(arrayVal));
|
||||
formattedValue = formattedValue.map((arrayVal) => parseFloat(arrayVal))
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'number' && typeof formattedValue === 'string') {
|
||||
formattedValue = Number(val);
|
||||
formattedValue = Number(val)
|
||||
}
|
||||
|
||||
if (field.type === 'date' && typeof val === 'string') {
|
||||
formattedValue = new Date(val);
|
||||
formattedValue = new Date(val)
|
||||
if (Number.isNaN(Date.parse(formattedValue))) {
|
||||
return undefined;
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (['relationship', 'upload'].includes(field.type)) {
|
||||
if (val === 'null') {
|
||||
formattedValue = null;
|
||||
formattedValue = null
|
||||
}
|
||||
|
||||
if (operator === 'in' && Array.isArray(formattedValue)) {
|
||||
formattedValue = formattedValue.reduce((formattedValues, inVal) => {
|
||||
const newValues = [inVal];
|
||||
if (mongoose.Types.ObjectId.isValid(inVal)) newValues.push(new mongoose.Types.ObjectId(inVal));
|
||||
const newValues = [inVal]
|
||||
if (mongoose.Types.ObjectId.isValid(inVal))
|
||||
newValues.push(new mongoose.Types.ObjectId(inVal))
|
||||
|
||||
const parsedNumber = parseFloat(inVal);
|
||||
if (!Number.isNaN(parsedNumber)) newValues.push(parsedNumber);
|
||||
const parsedNumber = parseFloat(inVal)
|
||||
if (!Number.isNaN(parsedNumber)) newValues.push(parsedNumber)
|
||||
|
||||
return [
|
||||
...formattedValues,
|
||||
...newValues,
|
||||
];
|
||||
}, []);
|
||||
return [...formattedValues, ...newValues]
|
||||
}, [])
|
||||
}
|
||||
}
|
||||
|
||||
// Set up specific formatting necessary by operators
|
||||
|
||||
if (operator === 'near') {
|
||||
let lng;
|
||||
let lat;
|
||||
let maxDistance;
|
||||
let minDistance;
|
||||
let lng
|
||||
let lat
|
||||
let maxDistance
|
||||
let minDistance
|
||||
|
||||
if (Array.isArray(formattedValue)) {
|
||||
[lng, lat, maxDistance, minDistance] = formattedValue;
|
||||
;[lng, lat, maxDistance, minDistance] = formattedValue
|
||||
}
|
||||
|
||||
if (typeof formattedValue === 'string') {
|
||||
[lng, lat, maxDistance, minDistance] = createArrayFromCommaDelineated(formattedValue);
|
||||
;[lng, lat, maxDistance, minDistance] = createArrayFromCommaDelineated(formattedValue)
|
||||
}
|
||||
|
||||
if (lng == null || lat == null || (maxDistance == null && minDistance == null)) {
|
||||
formattedValue = undefined;
|
||||
formattedValue = undefined
|
||||
} else {
|
||||
formattedValue = {
|
||||
$geometry: { type: 'Point', coordinates: [parseFloat(lng), parseFloat(lat)] },
|
||||
};
|
||||
$geometry: { coordinates: [parseFloat(lng), parseFloat(lat)], type: 'Point' },
|
||||
}
|
||||
|
||||
if (maxDistance) formattedValue.$maxDistance = parseFloat(maxDistance);
|
||||
if (minDistance) formattedValue.$minDistance = parseFloat(minDistance);
|
||||
if (maxDistance) formattedValue.$maxDistance = parseFloat(maxDistance)
|
||||
if (minDistance) formattedValue.$minDistance = parseFloat(minDistance)
|
||||
}
|
||||
}
|
||||
|
||||
if (operator === 'within' || operator === 'intersects') {
|
||||
formattedValue = {
|
||||
$geometry: formattedValue,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (path !== '_id' || (path === '_id' && hasCustomID && field.type === 'text')) {
|
||||
if (operator === 'contains') {
|
||||
formattedValue = { $regex: formattedValue, $options: 'i' };
|
||||
formattedValue = { $options: 'i', $regex: formattedValue }
|
||||
}
|
||||
}
|
||||
|
||||
if (operator === 'exists') {
|
||||
formattedValue = (formattedValue === 'true' || formattedValue === true);
|
||||
formattedValue = formattedValue === 'true' || formattedValue === true
|
||||
}
|
||||
|
||||
return formattedValue;
|
||||
};
|
||||
return formattedValue
|
||||
}
|
||||
|
||||
@@ -1,46 +1,39 @@
|
||||
import type { PaginateOptions } from 'mongoose';
|
||||
import type { QueryDrafts } from 'payload/dist/database/types';
|
||||
import flattenWhereToOperators from 'payload/dist/database/flattenWhereToOperators';
|
||||
import { PayloadRequest } from 'payload/dist/express/types';
|
||||
import { combineQueries } from 'payload/dist/database/combineQueries';
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
|
||||
import type { MongooseAdapter } from '.';
|
||||
import { buildSortParam } from './queries/buildSortParam';
|
||||
import { withSession } from './withSession';
|
||||
import type { PaginateOptions } from 'mongoose'
|
||||
import type { QueryDrafts } from 'payload/database'
|
||||
import type { PayloadRequest } from 'payload/types'
|
||||
|
||||
export const queryDrafts: QueryDrafts = async function queryDrafts(
|
||||
import { flattenWhereToOperators, combineQueries } from 'payload/database'
|
||||
|
||||
import type { MongooseAdapter } from '.'
|
||||
|
||||
import { buildSortParam } from './queries/buildSortParam'
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields'
|
||||
import { withSession } from './withSession'
|
||||
|
||||
export const queryDrafts: QueryDrafts = async function queryDrafts<T>(
|
||||
this: MongooseAdapter,
|
||||
{
|
||||
collection,
|
||||
where,
|
||||
page,
|
||||
limit,
|
||||
sort: sortArg,
|
||||
locale,
|
||||
pagination,
|
||||
req = {} as PayloadRequest,
|
||||
},
|
||||
{ collection, limit, locale, page, pagination, req = {} as PayloadRequest, sort: sortArg, where },
|
||||
) {
|
||||
const VersionModel = this.versions[collection];
|
||||
const collectionConfig = this.payload.collections[collection].config;
|
||||
const options = withSession(this, req.transactionID);
|
||||
const VersionModel = this.versions[collection]
|
||||
const collectionConfig = this.payload.collections[collection].config
|
||||
const options = withSession(this, req.transactionID)
|
||||
|
||||
let hasNearConstraint;
|
||||
let sort;
|
||||
|
||||
if (where) {
|
||||
const constraints = flattenWhereToOperators(where);
|
||||
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'));
|
||||
const constraints = flattenWhereToOperators(where)
|
||||
hasNearConstraint = constraints.some((prop) => Object.keys(prop).some((key) => key === 'near'))
|
||||
}
|
||||
|
||||
if (!hasNearConstraint) {
|
||||
sort = buildSortParam({
|
||||
sort: sortArg || collectionConfig.defaultSort,
|
||||
fields: collectionConfig.fields,
|
||||
timestamps: true,
|
||||
config: this.payload.config,
|
||||
fields: collectionConfig.fields,
|
||||
locale,
|
||||
});
|
||||
sort: sortArg || collectionConfig.defaultSort,
|
||||
timestamps: true,
|
||||
})
|
||||
}
|
||||
|
||||
const combinedWhere = combineQueries({ latest: { equals: true } }, where);
|
||||
@@ -79,11 +72,11 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
|
||||
_id: doc.parent,
|
||||
id: doc.parent,
|
||||
...doc.version,
|
||||
updatedAt: doc.updatedAt,
|
||||
createdAt: doc.createdAt,
|
||||
};
|
||||
updatedAt: doc.updatedAt,
|
||||
}
|
||||
|
||||
return sanitizeInternalFields(doc);
|
||||
return sanitizeInternalFields(doc)
|
||||
}),
|
||||
};
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
export const email = 'test@test.com';
export const password = 'test123';
export const email = 'test@test.com'
export const password = 'test123'
export const connection = {
url: 'mongodb://127.0.0.1',
port: 27018,
name: 'payloadmemory',
};
port: 27018,
url: 'mongodb://127.0.0.1',
}

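These constants describe the in-memory MongoDB instance the adapter's tests connect to. Purely for illustration — the module name in the import is an assumption, not something this commit defines — they compose into an ordinary connection string:

// Illustrative only.
import { connection } from './credentials' // assumed module name for the constants above

// e.g. 'mongodb://127.0.0.1:27018/payloadmemory'
const uri = `${connection.url}:${connection.port}/${connection.name}`
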
@@ -1,32 +1,36 @@
import type { TransactionOptions } from 'mongodb';
import { v4 as uuid } from 'uuid';
import { BeginTransaction } from 'payload/dist/database/types';
import { APIError } from 'payload/errors';
// @ts-expect-error // TODO: Fix this import
import type { TransactionOptions } from 'mongodb'
import type { BeginTransaction } from 'payload/database'

let transactionsNotAvailable: boolean;
import { APIError } from 'payload/errors'
import { v4 as uuid } from 'uuid'

let transactionsNotAvailable: boolean
export const beginTransaction: BeginTransaction = async function beginTransaction(
options: TransactionOptions = {},
) {
let id = null;
let id = null
if (!this.connection) {
throw new APIError('beginTransaction called while no connection to the database exists');
throw new APIError('beginTransaction called while no connection to the database exists')
}

if (transactionsNotAvailable) return id;
if (transactionsNotAvailable) return id

if (!this.connection.get('replicaSet')) {
transactionsNotAvailable = true;
this.payload.logger.warn('Database transactions for MongoDB are only available when connecting to a replica set. Operations will continue without using transactions.');
transactionsNotAvailable = true
this.payload.logger.warn(
'Database transactions for MongoDB are only available when connecting to a replica set. Operations will continue without using transactions.',
)
} else {
id = uuid();
id = uuid()
if (!this.sessions[id]) {
this.sessions[id] = await this.connection.getClient().startSession();
this.sessions[id] = await this.connection.getClient().startSession()
}
if (this.sessions[id].inTransaction()) {
this.payload.logger.warn('beginTransaction called while transaction already exists');
this.payload.logger.warn('beginTransaction called while transaction already exists')
} else {
await this.sessions[id].startTransaction(options);
await this.sessions[id].startTransaction(options)
}
}
return id;
};
return id
}

@@ -1,15 +1,14 @@
import { CommitTransaction } from 'payload/dist/database/types';

import type { CommitTransaction } from 'payload/database'

export const commitTransaction: CommitTransaction = async function commitTransaction(id) {
if (!this.connection.get('replicaSet')) {
return;
return
}
if (!this.session[id]?.inTransaction()) {
this.payload.logger.warn('commitTransaction called when no transaction exists');
return;
this.payload.logger.warn('commitTransaction called when no transaction exists')
return
}
await this.session[id].commitTransaction();
await this.session[id].endSession();
delete this.session[id];
};
await this.session[id].commitTransaction()
await this.session[id].endSession()
delete this.session[id]
}

@@ -1,12 +1,13 @@
import { RollbackTransaction } from 'payload/dist/database/types';
import type { RollbackTransaction } from 'payload/database'


export const rollbackTransaction: RollbackTransaction = async function rollbackTransaction(id = '') {
export const rollbackTransaction: RollbackTransaction = async function rollbackTransaction(
id = '',
) {
if (!this.session[id]?.inTransaction()) {
this.payload.logger.warn('rollbackTransaction called when no transaction exists');
return;
this.payload.logger.warn('rollbackTransaction called when no transaction exists')
return
}
await this.session[id].abortTransaction();
await this.session[id].endSession();
delete this.session[id];
};
await this.session[id].abortTransaction()
await this.session[id].endSession()
delete this.session[id]
}

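Taken together, beginTransaction, commitTransaction, and rollbackTransaction above give the adapter Payload's transaction interface: begin returns a transaction ID (or null when no replica set is configured, in which case operations simply run without a transaction), and the ID is later passed to commit or rollback. A hedged sketch of the lifecycle a caller might follow — the wrapper function and the loosely typed adapter shape are illustrative, not part of this commit:

// Illustrative only: drive the three adapter methods defined above.
type TransactionalAdapter = {
  beginTransaction: () => Promise<null | number | string>
  commitTransaction: (id: number | string) => Promise<void>
  rollbackTransaction: (id: number | string) => Promise<void>
}

async function runInTransaction(adapter: TransactionalAdapter, work: () => Promise<void>) {
  const id = await adapter.beginTransaction() // null when transactions are unavailable
  try {
    await work()
    if (id) await adapter.commitTransaction(id)
  } catch (err) {
    if (id) await adapter.rollbackTransaction(id)
    throw err
  }
}
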
@@ -1,23 +1,56 @@
|
||||
import type { AggregatePaginateModel, IndexDefinition, IndexOptions, Model, PaginateModel, SchemaOptions } from 'mongoose';
|
||||
import { SanitizedConfig } from 'payload/dist/config/types';
|
||||
import { ArrayField, BlockField, CheckboxField, CodeField, CollapsibleField, DateField, EmailField, Field, GroupField, JSONField, NumberField, PointField, RadioField, RelationshipField, RichTextField, RowField, SelectField, TabsField, TextField, TextareaField, UploadField } from 'payload/dist/fields/config/types';
|
||||
import type { BuildQueryArgs } from './queries/buildQuery';
|
||||
import type {
|
||||
AggregatePaginateModel,
|
||||
IndexDefinition,
|
||||
IndexOptions,
|
||||
Model,
|
||||
PaginateModel,
|
||||
SchemaOptions,
|
||||
} from 'mongoose'
|
||||
import type { SanitizedConfig } from 'payload/config'
|
||||
import type {
|
||||
ArrayField,
|
||||
BlockField,
|
||||
CheckboxField,
|
||||
CodeField,
|
||||
CollapsibleField,
|
||||
DateField,
|
||||
EmailField,
|
||||
Field,
|
||||
GroupField,
|
||||
JSONField,
|
||||
NumberField,
|
||||
PointField,
|
||||
RadioField,
|
||||
RelationshipField,
|
||||
RichTextField,
|
||||
RowField,
|
||||
SelectField,
|
||||
TabsField,
|
||||
TextField,
|
||||
TextareaField,
|
||||
UploadField,
|
||||
} from 'payload/types'
|
||||
|
||||
export interface CollectionModel extends Model<any>, PaginateModel<any>, AggregatePaginateModel<any>, PassportLocalModel {
|
||||
import type { BuildQueryArgs } from './queries/buildQuery'
|
||||
|
||||
export interface CollectionModel
|
||||
extends Model<any>,
|
||||
PaginateModel<any>,
|
||||
AggregatePaginateModel<any>,
|
||||
PassportLocalModel {
|
||||
/** buildQuery is used to transform payload's where operator into what can be used by mongoose (e.g. id => _id) */
|
||||
buildQuery: (args: BuildQueryArgs) => Promise<Record<string, unknown>> // TODO: Delete this
|
||||
}
|
||||
type Register<T = any> = (doc: T, password: string) => T;
|
||||
type Register<T = any> = (doc: T, password: string) => T
|
||||
|
||||
interface PassportLocalModel {
|
||||
register: Register
|
||||
authenticate: any
|
||||
register: Register
|
||||
}
|
||||
|
||||
|
||||
export interface AuthCollectionModel extends CollectionModel {
|
||||
resetPasswordToken: string;
|
||||
resetPasswordExpiration: Date;
|
||||
resetPasswordExpiration: Date
|
||||
resetPasswordToken: string
|
||||
}
|
||||
|
||||
export type TypeOfIndex = {
|
||||
@@ -25,80 +58,82 @@ export type TypeOfIndex = {
|
||||
options?: IndexOptions
|
||||
}
|
||||
|
||||
|
||||
export interface GlobalModel extends Model<Document> {
|
||||
buildQuery: (query: unknown, locale?: string) => Promise<Record<string, unknown>>
|
||||
}
|
||||
|
||||
export type BuildSchema<TSchema> = (args: {
|
||||
config: SanitizedConfig,
|
||||
fields: Field[],
|
||||
options: BuildSchemaOptions,
|
||||
config: SanitizedConfig
|
||||
fields: Field[]
|
||||
options: BuildSchemaOptions
|
||||
}) => TSchema
|
||||
|
||||
export type BuildSchemaOptions = {
|
||||
options?: SchemaOptions
|
||||
allowIDField?: boolean
|
||||
disableUnique?: boolean
|
||||
draftsEnabled?: boolean
|
||||
indexSortableFields?: boolean
|
||||
options?: SchemaOptions
|
||||
}
|
||||
|
||||
export type FieldGenerator<TSchema, TField> = {
|
||||
field: TField,
|
||||
schema: TSchema,
|
||||
config: SanitizedConfig,
|
||||
options: BuildSchemaOptions,
|
||||
config: SanitizedConfig
|
||||
field: TField
|
||||
options: BuildSchemaOptions
|
||||
schema: TSchema
|
||||
}
|
||||
|
||||
/**
|
||||
* Field config types that need representation in the database
|
||||
*/
|
||||
type FieldType = 'number'
|
||||
| 'text'
|
||||
| 'email'
|
||||
| 'textarea'
|
||||
| 'richText'
|
||||
type FieldType =
|
||||
| 'array'
|
||||
| 'blocks'
|
||||
| 'checkbox'
|
||||
| 'code'
|
||||
| 'collapsible'
|
||||
| 'date'
|
||||
| 'email'
|
||||
| 'group'
|
||||
| 'json'
|
||||
| 'number'
|
||||
| 'point'
|
||||
| 'radio'
|
||||
| 'checkbox'
|
||||
| 'date'
|
||||
| 'upload'
|
||||
| 'relationship'
|
||||
| 'richText'
|
||||
| 'row'
|
||||
| 'collapsible'
|
||||
| 'tabs'
|
||||
| 'array'
|
||||
| 'group'
|
||||
| 'select'
|
||||
| 'blocks'
|
||||
| 'tabs'
|
||||
| 'text'
|
||||
| 'textarea'
|
||||
| 'upload'
|
||||
|
||||
export type FieldGeneratorFunction<TSchema, TField extends Field> = (args: FieldGenerator<TSchema, TField>) => void
|
||||
export type FieldGeneratorFunction<TSchema, TField extends Field> = (
|
||||
args: FieldGenerator<TSchema, TField>,
|
||||
) => void
|
||||
|
||||
/**
|
||||
* Object mapping types to a schema based on TSchema
|
||||
*/
|
||||
export type FieldToSchemaMap<TSchema> = {
|
||||
number: FieldGeneratorFunction<TSchema, NumberField>
|
||||
text: FieldGeneratorFunction<TSchema, TextField>
|
||||
email: FieldGeneratorFunction<TSchema, EmailField>
|
||||
textarea: FieldGeneratorFunction<TSchema, TextareaField>
|
||||
richText: FieldGeneratorFunction<TSchema, RichTextField>
|
||||
array: FieldGeneratorFunction<TSchema, ArrayField>
|
||||
blocks: FieldGeneratorFunction<TSchema, BlockField>
|
||||
checkbox: FieldGeneratorFunction<TSchema, CheckboxField>
|
||||
code: FieldGeneratorFunction<TSchema, CodeField>
|
||||
collapsible: FieldGeneratorFunction<TSchema, CollapsibleField>
|
||||
date: FieldGeneratorFunction<TSchema, DateField>
|
||||
email: FieldGeneratorFunction<TSchema, EmailField>
|
||||
group: FieldGeneratorFunction<TSchema, GroupField>
|
||||
json: FieldGeneratorFunction<TSchema, JSONField>
|
||||
number: FieldGeneratorFunction<TSchema, NumberField>
|
||||
point: FieldGeneratorFunction<TSchema, PointField>
|
||||
radio: FieldGeneratorFunction<TSchema, RadioField>
|
||||
checkbox: FieldGeneratorFunction<TSchema, CheckboxField>
|
||||
date: FieldGeneratorFunction<TSchema, DateField>
|
||||
upload: FieldGeneratorFunction<TSchema, UploadField>
|
||||
relationship: FieldGeneratorFunction<TSchema, RelationshipField>
|
||||
richText: FieldGeneratorFunction<TSchema, RichTextField>
|
||||
row: FieldGeneratorFunction<TSchema, RowField>
|
||||
collapsible: FieldGeneratorFunction<TSchema, CollapsibleField>
|
||||
tabs: FieldGeneratorFunction<TSchema, TabsField>
|
||||
array: FieldGeneratorFunction<TSchema, ArrayField>
|
||||
group: FieldGeneratorFunction<TSchema, GroupField>
|
||||
select: FieldGeneratorFunction<TSchema, SelectField>
|
||||
blocks: FieldGeneratorFunction<TSchema, BlockField>
|
||||
tabs: FieldGeneratorFunction<TSchema, TabsField>
|
||||
text: FieldGeneratorFunction<TSchema, TextField>
|
||||
textarea: FieldGeneratorFunction<TSchema, TextareaField>
|
||||
upload: FieldGeneratorFunction<TSchema, UploadField>
|
||||
}
|
||||
|
||||
@@ -1,32 +1,30 @@
|
||||
import type { UpdateGlobal } from 'payload/dist/database/types';
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
|
||||
import type { PayloadRequest } from 'payload/dist/express/types';
|
||||
import type { MongooseAdapter } from '.';
|
||||
import { withSession } from './withSession';
|
||||
import type { UpdateGlobal } from 'payload/database'
|
||||
import type { PayloadRequest } from 'payload/types'
|
||||
|
||||
import type { MongooseAdapter } from '.'
|
||||
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields'
|
||||
import { withSession } from './withSession'
|
||||
|
||||
export const updateGlobal: UpdateGlobal = async function updateGlobal(
|
||||
this: MongooseAdapter,
|
||||
{ slug, data, req = {} as PayloadRequest },
|
||||
{ data, req = {} as PayloadRequest, slug },
|
||||
) {
|
||||
const Model = this.globals;
|
||||
const Model = this.globals
|
||||
const options = {
|
||||
...withSession(this, req.transactionID),
|
||||
new: true,
|
||||
lean: true,
|
||||
};
|
||||
new: true,
|
||||
}
|
||||
|
||||
let result;
|
||||
result = await Model.findOneAndUpdate(
|
||||
{ globalType: slug },
|
||||
data,
|
||||
options,
|
||||
);
|
||||
let result
|
||||
result = await Model.findOneAndUpdate({ globalType: slug }, data, options)
|
||||
|
||||
result = JSON.parse(JSON.stringify(result));
|
||||
result = JSON.parse(JSON.stringify(result))
|
||||
|
||||
// custom id type reset
|
||||
result.id = result._id;
|
||||
result = sanitizeInternalFields(result);
|
||||
result.id = result._id
|
||||
result = sanitizeInternalFields(result)
|
||||
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1,50 +1,53 @@
|
||||
import { ValidationError } from 'payload/errors';
|
||||
import type { PayloadRequest } from 'payload/types';
|
||||
import type { UpdateOne } from 'payload/dist/database/types';
|
||||
import i18nInit from 'payload/dist/translations/init';
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields';
|
||||
import type { MongooseAdapter } from '.';
|
||||
import { withSession } from './withSession';
|
||||
import type { UpdateOne } from 'payload/database'
|
||||
import type { PayloadRequest } from 'payload/types'
|
||||
|
||||
import { ValidationError } from 'payload/errors'
|
||||
import { i18nInit } from 'payload/utilities'
|
||||
|
||||
import type { MongooseAdapter } from '.'
|
||||
|
||||
import sanitizeInternalFields from './utilities/sanitizeInternalFields'
|
||||
import { withSession } from './withSession'
|
||||
|
||||
export const updateOne: UpdateOne = async function updateOne(
|
||||
this: MongooseAdapter,
|
||||
{ collection, data, where: whereArg, id, locale, req = {} as PayloadRequest },
|
||||
{ collection, data, id, locale, req = {} as PayloadRequest, where: whereArg },
|
||||
) {
|
||||
const where = id ? { id: { equals: id } } : whereArg;
|
||||
const Model = this.collections[collection];
|
||||
const where = id ? { id: { equals: id } } : whereArg
|
||||
const Model = this.collections[collection]
|
||||
const options = {
|
||||
...withSession(this, req.transactionID),
|
||||
new: true,
|
||||
lean: true,
|
||||
};
|
||||
new: true,
|
||||
}
|
||||
|
||||
const query = await Model.buildQuery({
|
||||
payload: this.payload,
|
||||
locale,
|
||||
payload: this.payload,
|
||||
where,
|
||||
});
|
||||
})
|
||||
|
||||
let result;
|
||||
let result
|
||||
try {
|
||||
result = await Model.findOneAndUpdate(query, data, options);
|
||||
result = await Model.findOneAndUpdate(query, data, options)
|
||||
} catch (error) {
|
||||
// Handle uniqueness error from MongoDB
|
||||
throw error.code === 11000 && error.keyValue
|
||||
? new ValidationError(
|
||||
[
|
||||
{
|
||||
message: 'Value must be unique',
|
||||
field: Object.keys(error.keyValue)[0],
|
||||
},
|
||||
],
|
||||
req?.t ?? i18nInit(this.payload.config.i18n).t,
|
||||
)
|
||||
: error;
|
||||
[
|
||||
{
|
||||
field: Object.keys(error.keyValue)[0],
|
||||
message: 'Value must be unique',
|
||||
},
|
||||
],
|
||||
req?.t ?? i18nInit(this.payload.config.i18n).t,
|
||||
)
|
||||
: error
|
||||
}
|
||||
|
||||
result = JSON.parse(JSON.stringify(result));
|
||||
result.id = result._id;
|
||||
result = sanitizeInternalFields(result);
|
||||
result = JSON.parse(JSON.stringify(result))
|
||||
result.id = result._id
|
||||
result = sanitizeInternalFields(result)
|
||||
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1,35 +1,37 @@
|
||||
import type { UpdateVersion } from 'payload/dist/database/types';
|
||||
import type { PayloadRequest } from 'payload/dist/express/types';
|
||||
import type { MongooseAdapter } from '.';
|
||||
import { withSession } from './withSession';
|
||||
import type { UpdateVersion } from 'payload/database'
|
||||
import type { PayloadRequest } from 'payload/types'
|
||||
|
||||
import type { MongooseAdapter } from '.'
|
||||
|
||||
import { withSession } from './withSession'
|
||||
|
||||
export const updateVersion: UpdateVersion = async function updateVersion(
|
||||
this: MongooseAdapter,
|
||||
{ collectionSlug, where, locale, versionData, req = {} as PayloadRequest },
|
||||
{ collectionSlug, locale, req = {} as PayloadRequest, versionData, where },
|
||||
) {
|
||||
const VersionModel = this.versions[collectionSlug];
|
||||
const VersionModel = this.versions[collectionSlug]
|
||||
const options = {
|
||||
...withSession(this, req.transactionID),
|
||||
new: true,
|
||||
lean: true,
|
||||
};
|
||||
new: true,
|
||||
}
|
||||
|
||||
const query = await VersionModel.buildQuery({
|
||||
payload: this.payload,
|
||||
locale,
|
||||
payload: this.payload,
|
||||
where,
|
||||
});
|
||||
})
|
||||
|
||||
const doc = await VersionModel.findOneAndUpdate(query, versionData, options);
|
||||
const doc = await VersionModel.findOneAndUpdate(query, versionData, options)
|
||||
|
||||
const result = JSON.parse(JSON.stringify(doc));
|
||||
const result = JSON.parse(JSON.stringify(doc))
|
||||
|
||||
const verificationToken = doc._verificationToken;
|
||||
const verificationToken = doc._verificationToken
|
||||
|
||||
// custom id type reset
|
||||
result.id = result._id;
|
||||
result.id = result._id
|
||||
if (verificationToken) {
|
||||
result._verificationToken = verificationToken;
|
||||
result._verificationToken = verificationToken
|
||||
}
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1,21 +1,22 @@
const internalFields = ['__v'];
const internalFields = ['__v']

const sanitizeInternalFields = <T extends Record<string, unknown>>(incomingDoc: T): T =>
Object.entries(incomingDoc).reduce((newDoc, [key, val]): T => {
if (key === '_id') {
return {
...newDoc,
id: val,
}
}

if (internalFields.indexOf(key) > -1) {
return newDoc
}

const sanitizeInternalFields = <T extends Record<string, unknown>>(incomingDoc: T): T => Object.entries(incomingDoc).reduce((newDoc, [key, val]): T => {
if (key === '_id') {
return {
...newDoc,
id: val,
};
}
[key]: val,
}
}, {} as T)

if (internalFields.indexOf(key) > -1) {
return newDoc;
}

return {
...newDoc,
[key]: val,
};
}, {} as T);

export default sanitizeInternalFields;
export default sanitizeInternalFields

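In both the old and new versions above, sanitizeInternalFields renames Mongo's _id to id and drops internal keys such as __v before a document is handed back to Payload. Illustrative only — the sample document below is made up:

import sanitizeInternalFields from './utilities/sanitizeInternalFields' // path as used elsewhere in this commit

const raw = { _id: '64d2f0c1a2b3c4d5e6f7a8b9', __v: 0, title: 'Hello' }
const clean = sanitizeInternalFields(raw)
// => { id: '64d2f0c1a2b3c4d5e6f7a8b9', title: 'Hello' }
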
@@ -1,15 +1,16 @@
import path from 'path';
import type { Webpack } from 'payload/dist/database/types';
import type { Webpack } from 'payload/database'

import path from 'path'

export const webpack: Webpack = (config) => {
return {
...config,
resolve: {
...config.resolve || {},
...(config.resolve || {}),
alias: {
...config.resolve?.alias || {},
...(config.resolve?.alias || {}),
[path.resolve(__dirname, './index')]: path.resolve(__dirname, 'mock'),
},
},
};
};
}
}

@@ -1,10 +1,14 @@
import type { ClientSession } from 'mongoose';
import { MongooseAdapter } from './index';
import type { ClientSession } from 'mongoose'

import type { MongooseAdapter } from './index'

/**
* returns the session belonging to the transaction of the req.session if exists
* @returns ClientSession
*/
export function withSession(db: MongooseAdapter, transactionID?: string | number): { session: ClientSession } | object {
return db.sessions[transactionID] ? { session: db.sessions[transactionID] } : {};
export function withSession(
db: MongooseAdapter,
transactionID?: number | string,
): { session: ClientSession } | object {
return db.sessions[transactionID] ? { session: db.sessions[transactionID] } : {}
}

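withSession is how the rest of the adapter opts individual Mongoose calls into an active transaction: methods spread its result into their query options, as updateGlobal and updateOne do elsewhere in this commit. A minimal sketch — the wrapper function name and the model passed in are assumptions for illustration:

import type { Model } from 'mongoose'
import type { MongooseAdapter } from './index'

import { withSession } from './withSession'

// Illustrative only: join the request's transaction when one exists, otherwise run standalone.
async function updateGlobalDoc(
  adapter: MongooseAdapter,
  GlobalModel: Model<any>,
  slug: string,
  data: Record<string, unknown>,
  transactionID?: string,
) {
  const options = { ...withSession(adapter, transactionID), lean: true, new: true }
  return GlobalModel.findOneAndUpdate({ globalType: slug }, data, options)
}
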
@@ -1,11 +1,24 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"declaration": true, /* Generates corresponding '.d.ts' file. */
"module": "commonjs", /* Specify what module code is generated. */
"rootDir": "./src", /* Specify the root folder within your source files. */
"outDir": "./dist", /* Specify an output folder for all emitted files. */
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
}
"composite": true, // Make sure typescript knows that this module depends on their references
"noEmit": false /* Do not emit outputs. */,
"emitDeclarationOnly": true,
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
"rootDir": "./src" /* Specify the root folder within your source files. */
},
"exclude": [
"dist",
"build",
"tests",
"test",
"node_modules",
".eslintrc.js",
"src/**/*.spec.js",
"src/**/*.spec.jsx",
"src/**/*.spec.ts",
"src/**/*.spec.tsx"
],
"include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.d.ts", "src/**/*.json"],
"references": [{ "path": "../payload" }] // db-mongodb depends on payload
}

File diff suppressed because it is too large
10
packages/db-postgres/.eslintignore
Normal file
@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp

15
packages/db-postgres/.eslintrc.cjs
Normal file
@@ -0,0 +1,15 @@
/** @type {import('prettier').Config} */
module.exports = {
extends: ['@payloadcms'],
overrides: [
{
extends: ['plugin:@typescript-eslint/disable-type-checked'],
files: ['*.js', '*.cjs', '*.json', '*.md', '*.yml', '*.yaml'],
},
],
parserOptions: {
project: ['./tsconfig.json'],
tsconfigRootDir: __dirname,
},
root: true,
}

10
packages/db-postgres/.prettierignore
Normal file
@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp

15
packages/db-postgres/.swcrc
Normal file
@@ -0,0 +1,15 @@
{
"$schema": "https://json.schemastore.org/swcrc",
"sourceMaps": "inline",
"jsc": {
"target": "esnext",
"parser": {
"syntax": "typescript",
"tsx": true,
"dts": true
}
},
"module": {
"type": "commonjs"
}
}

@@ -1,30 +1,38 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "0.0.1",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"main": "index.js",
|
||||
"repository": "https://github.com/payloadcms/payload",
|
||||
"author": "Payload CMS, Inc.",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "tsc"
|
||||
},
|
||||
"author": "Payload CMS, Inc.",
|
||||
"peerDependencies": {
|
||||
"better-sqlite3": "^8.5.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@libsql/client": "^0.3.1",
|
||||
"drizzle-kit": "^0.19.13-a511135",
|
||||
"drizzle-orm": "^0.28.0",
|
||||
"pg": "^8.11.1",
|
||||
"prompts": "^2.4.2",
|
||||
"to-snake-case": "^1.0.0"
|
||||
"drizzle-kit": "0.19.13-e99bac1",
|
||||
"drizzle-orm": "0.28.5",
|
||||
"pg": "8.11.3",
|
||||
"prompts": "2.4.2",
|
||||
"to-snake-case": "1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/pg": "^8.10.2",
|
||||
"@types/to-snake-case": "^1.0.0",
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/pg": "8.10.2",
|
||||
"@types/to-snake-case": "1.0.0",
|
||||
"better-sqlite3": "^8.5.0",
|
||||
"payload": "payloadcms/payload#build/chore/update-2.0",
|
||||
"typescript": "^4.9.4"
|
||||
}
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"main": "./src/index.ts",
|
||||
"types": "./src/index.ts",
|
||||
"publishConfig": {
|
||||
"registry": "https://registry.npmjs.org/",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts"
|
||||
},
|
||||
"repository": "https://github.com/payloadcms/payload",
|
||||
"scripts": {
|
||||
"build:swc": "swc ./src -d ./dist --config-file .swcrc",
|
||||
"build:types": "tsc --emitDeclarationOnly --outDir dist",
|
||||
"builddisabled": "pnpm build:swc && pnpm build:types"
|
||||
},
|
||||
"version": "0.0.1"
|
||||
}
|
||||
|
||||
@@ -13,133 +13,133 @@ import { DrizzleDB } from './types';
|
||||
|
||||
// Migration table def in order to use query using drizzle
|
||||
const migrationsSchema = pgTable('payload_migrations', {
|
||||
name: varchar('name'),
|
||||
batch: numeric('batch'),
|
||||
name: varchar('name'),
|
||||
schema: jsonb('schema'),
|
||||
});
|
||||
})
|
||||
|
||||
export const connect: Connect = async function connect(
|
||||
this: PostgresAdapter,
|
||||
payload,
|
||||
) {
|
||||
let db: DrizzleDB;
|
||||
export const connect: Connect = async function connect(this: PostgresAdapter, payload) {
|
||||
let db: DrizzleDB
|
||||
|
||||
this.schema = {
|
||||
...this.tables,
|
||||
...this.relations,
|
||||
...this.enums,
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
if ('pool' in this && this.pool !== false) {
|
||||
const pool = new Pool(this.pool);
|
||||
db = drizzle(pool, { schema: this.schema });
|
||||
await pool.connect();
|
||||
const pool = new Pool(this.pool)
|
||||
db = drizzle(pool, { schema: this.schema })
|
||||
await pool.connect()
|
||||
}
|
||||
|
||||
if ('client' in this && this.client !== false) {
|
||||
const client = new Client(this.client);
|
||||
db = drizzle(client, { schema: this.schema });
|
||||
await client.connect();
|
||||
const client = new Client(this.client)
|
||||
db = drizzle(client, { schema: this.schema })
|
||||
await client.connect()
|
||||
}
|
||||
|
||||
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
|
||||
this.payload.logger.info('---- DROPPING TABLES ----');
|
||||
await db.execute(sql`drop schema public cascade;\ncreate schema public;`);
|
||||
this.payload.logger.info('---- DROPPED TABLES ----');
|
||||
this.payload.logger.info('---- DROPPING TABLES ----')
|
||||
await db.execute(sql`drop schema public cascade;\ncreate schema public;`)
|
||||
this.payload.logger.info('---- DROPPED TABLES ----')
|
||||
}
|
||||
} catch (err) {
|
||||
payload.logger.error(
|
||||
`Error: cannot connect to Postgres. Details: ${err.message}`,
|
||||
err,
|
||||
);
|
||||
process.exit(1);
|
||||
payload.logger.error(`Error: cannot connect to Postgres. Details: ${err.message}`, err)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
this.payload.logger.info('Connected to Postgres successfully');
|
||||
this.db = db;
|
||||
this.payload.logger.info('Connected to Postgres successfully')
|
||||
this.db = db
|
||||
|
||||
// Only push schema if not in production
|
||||
if (process.env.NODE_ENV === 'production') return;
|
||||
if (process.env.NODE_ENV === 'production') return
|
||||
|
||||
// This will prompt if clarifications are needed for Drizzle to push new schema
|
||||
const { hasDataLoss, warnings, statementsToExecute, apply } = await pushSchema(this.schema, this.db);
|
||||
const { apply, hasDataLoss, statementsToExecute, warnings } = await pushSchema(
|
||||
this.schema,
|
||||
this.db,
|
||||
)
|
||||
|
||||
this.payload.logger.debug({
|
||||
msg: 'Schema push results',
|
||||
hasDataLoss,
|
||||
warnings,
|
||||
msg: 'Schema push results',
|
||||
statementsToExecute,
|
||||
});
|
||||
warnings,
|
||||
})
|
||||
|
||||
if (warnings.length) {
|
||||
this.payload.logger.warn({
|
||||
msg: `Warnings detected during schema push: ${warnings.join('\n')}`,
|
||||
warnings,
|
||||
});
|
||||
})
|
||||
|
||||
if (hasDataLoss) {
|
||||
this.payload.logger.warn({
|
||||
msg: 'DATA LOSS WARNING: Possible data loss detected if schema is pushed.',
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
const { confirm: acceptWarnings } = await prompts(
|
||||
{
|
||||
type: 'confirm',
|
||||
name: 'confirm',
|
||||
message: 'Accept warnings and push schema to database?',
|
||||
initial: false,
|
||||
message: 'Accept warnings and push schema to database?',
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0);
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
|
||||
// Exit if user does not accept warnings.
|
||||
// Q: Is this the right type of exit for this interaction?
|
||||
if (!acceptWarnings) {
|
||||
process.exit(0);
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
this.migrationDir = '.migrations';
|
||||
this.migrationDir = '.migrations'
|
||||
|
||||
// Create drizzle snapshot if it doesn't exist
|
||||
if (!fs.existsSync(`${this.migrationDir}/drizzle-snapshot.json`)) {
|
||||
// Ensure migration dir exists
|
||||
if (!fs.existsSync(this.migrationDir)) {
|
||||
fs.mkdirSync(this.migrationDir);
|
||||
fs.mkdirSync(this.migrationDir)
|
||||
}
|
||||
|
||||
const drizzleJSON = generateDrizzleJson(this.schema);
|
||||
const drizzleJSON = generateDrizzleJson(this.schema)
|
||||
|
||||
fs.writeFileSync(`${this.migrationDir}/drizzle-snapshot.json`, JSON.stringify(drizzleJSON, null, 2));
|
||||
fs.writeFileSync(
|
||||
`${this.migrationDir}/drizzle-snapshot.json`,
|
||||
JSON.stringify(drizzleJSON, null, 2),
|
||||
)
|
||||
}
|
||||
|
||||
const jsonSchema = configToJSONSchema(this.payload.config);
|
||||
const jsonSchema = configToJSONSchema(this.payload.config)
|
||||
|
||||
await apply();
|
||||
await apply()
|
||||
|
||||
const devPush = await this.db
|
||||
.select()
|
||||
.from(migrationsSchema)
|
||||
.where(eq(migrationsSchema.batch, '-1'));
|
||||
.where(eq(migrationsSchema.batch, '-1'))
|
||||
|
||||
if (!devPush.length) {
|
||||
await this.db.insert(migrationsSchema).values({
|
||||
name: 'dev',
|
||||
batch: '-1',
|
||||
name: 'dev',
|
||||
schema: JSON.stringify(jsonSchema),
|
||||
});
|
||||
})
|
||||
} else {
|
||||
await this.db
|
||||
.update(migrationsSchema)
|
||||
.set({
|
||||
schema: JSON.stringify(jsonSchema),
|
||||
})
|
||||
.where(eq(migrationsSchema.batch, '-1'));
|
||||
.where(eq(migrationsSchema.batch, '-1'))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import { Create } from 'payload/dist/database/types';
|
||||
import toSnakeCase from 'to-snake-case';
|
||||
import { upsertRow } from '../upsertRow';
|
||||
import type { Create } from 'payload/database'
|
||||
|
||||
export const create: Create = async function create({
|
||||
collection: collectionSlug,
|
||||
data,
|
||||
req,
|
||||
}) {
|
||||
const collection = this.payload.collections[collectionSlug].config;
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import { upsertRow } from '../upsertRow'
|
||||
|
||||
export const create: Create = async function create({ collection: collectionSlug, data, req }) {
|
||||
const collection = this.payload.collections[collectionSlug].config
|
||||
|
||||
const result = await upsertRow({
|
||||
adapter: this,
|
||||
@@ -15,7 +13,7 @@ export const create: Create = async function create({
|
||||
fields: collection.fields,
|
||||
operation: 'create',
|
||||
tableName: toSnakeCase(collectionSlug),
|
||||
});
|
||||
})
|
||||
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
/* eslint-disable no-restricted-syntax, no-await-in-loop */
|
||||
import fs from 'fs';
|
||||
import { CreateMigration } from 'payload/dist/database/types';
|
||||
import type { CreateMigration } from 'payload/database'
|
||||
import type { DatabaseAdapter, Init } from 'payload/database'
|
||||
|
||||
import { generateDrizzleJson, generateMigration } from 'drizzle-kit/utils';
|
||||
import { eq } from 'drizzle-orm';
|
||||
import { jsonb, numeric, pgEnum, pgTable, varchar } from 'drizzle-orm/pg-core';
|
||||
import { SanitizedCollectionConfig } from 'payload/dist/collections/config/types';
|
||||
import type { DatabaseAdapter, Init } from 'payload/dist/database/types';
|
||||
import { configToJSONSchema } from 'payload/dist/utilities/configToJSONSchema';
|
||||
import prompts from 'prompts';
|
||||
import { buildTable } from './schema/build';
|
||||
import type { GenericEnum, GenericRelation, GenericTable, PostgresAdapter } from './types';
|
||||
import { generateDrizzleJson, generateMigration } from 'drizzle-kit/utils'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { jsonb, numeric, pgEnum, pgTable, varchar } from 'drizzle-orm/pg-core'
|
||||
import fs from 'fs'
|
||||
import { SanitizedCollectionConfig } from 'payload/types'
|
||||
import { configToJSONSchema } from 'payload/utilities'
|
||||
import prompts from 'prompts'
|
||||
|
||||
import type { GenericEnum, GenericRelation, GenericTable, PostgresAdapter } from './types'
|
||||
|
||||
import { buildTable } from './schema/build'
|
||||
|
||||
const migrationTemplate = (upSQL?: string) => `
|
||||
import payload, { Payload } from 'payload';
|
||||
@@ -22,7 +24,7 @@ export async function up(payload: Payload): Promise<void> {
|
||||
export async function down(payload: Payload): Promise<void> {
|
||||
// Migration code
|
||||
};
|
||||
`;
|
||||
`
|
||||
|
||||
export const createMigration: CreateMigration = async function createMigration(
|
||||
this: PostgresAdapter,
|
||||
@@ -30,27 +32,30 @@ export const createMigration: CreateMigration = async function createMigration(
|
||||
migrationDir,
|
||||
migrationName,
|
||||
) {
|
||||
payload.logger.info({ msg: 'Creating migration from postgres adapter...' });
|
||||
const dir = migrationDir || '.migrations'; // TODO: Verify path after linking
|
||||
payload.logger.info({ msg: 'Creating migration from postgres adapter...' })
|
||||
const dir = migrationDir || '.migrations' // TODO: Verify path after linking
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir);
|
||||
fs.mkdirSync(dir)
|
||||
}
|
||||
|
||||
const [yyymmdd, hhmmss] = new Date().toISOString().split('T');
|
||||
const formattedDate = yyymmdd.replace(/\D/g, '');
|
||||
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '');
|
||||
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
|
||||
const formattedDate = yyymmdd.replace(/\D/g, '')
|
||||
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
|
||||
|
||||
const timestamp = `${formattedDate}_${formattedTime}`;
|
||||
const timestamp = `${formattedDate}_${formattedTime}`
|
||||
|
||||
const formattedName = migrationName.replace(/\W/g, '_');
|
||||
const fileName = `${timestamp}_${formattedName}.ts`;
|
||||
const filePath = `${dir}/${fileName}`;
|
||||
const formattedName = migrationName.replace(/\W/g, '_')
|
||||
const fileName = `${timestamp}_${formattedName}.ts`
|
||||
const filePath = `${dir}/${fileName}`
|
||||
|
||||
const snapshotJSON = fs.readFileSync(`${dir}/drizzle-snapshot.json`, 'utf8');
|
||||
const drizzleJsonBefore = generateDrizzleJson(JSON.parse(snapshotJSON));
|
||||
const drizzleJsonAfter = generateDrizzleJson(this.schema, drizzleJsonBefore.id);
|
||||
const sqlStatements = await generateMigration(drizzleJsonBefore, drizzleJsonAfter);
|
||||
fs.writeFileSync(filePath, migrationTemplate(sqlStatements.length ? sqlStatements?.join('\n') : undefined));
|
||||
const snapshotJSON = fs.readFileSync(`${dir}/drizzle-snapshot.json`, 'utf8')
|
||||
const drizzleJsonBefore = generateDrizzleJson(JSON.parse(snapshotJSON))
|
||||
const drizzleJsonAfter = generateDrizzleJson(this.schema, drizzleJsonBefore.id)
|
||||
const sqlStatements = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
fs.writeFileSync(
|
||||
filePath,
|
||||
migrationTemplate(sqlStatements.length ? sqlStatements?.join('\n') : undefined),
|
||||
)
|
||||
|
||||
// TODO:
|
||||
// Get the most recent migration schema from the file system
|
||||
@@ -61,4 +66,4 @@ export const createMigration: CreateMigration = async function createMigration(
|
||||
// and then inject them each into the `migrationTemplate` above,
|
||||
// outputting the file into the migrations folder accordingly
|
||||
// also make sure to output the JSON schema snapshot into a `./migrationsDir/meta` folder like Drizzle does
|
||||
};
|
||||
}
|
||||
|
||||
@@ -22,14 +22,15 @@ export const buildFindManyArgs = ({
|
||||
}: BuildFindQueryArgs): Record<string, unknown> => {
|
||||
const result: Result = {
|
||||
with: {},
|
||||
};
|
||||
}
|
||||
|
||||
const _locales: Result = {
|
||||
columns: {
|
||||
id: false,
|
||||
_parentID: false,
|
||||
id: false,
|
||||
},
|
||||
};
|
||||
where: createLocaleWhereQuery({ fallbackLocale, locale }),
|
||||
}
|
||||
|
||||
if (adapter.tables[`${tableName}_relationships`]) {
|
||||
result.with._relationships = {
|
||||
@@ -42,13 +43,14 @@ export const buildFindManyArgs = ({
|
||||
}
|
||||
|
||||
if (adapter.tables[`${tableName}_locales`]) {
|
||||
result.with._locales = _locales;
|
||||
result.with._locales = _locales
|
||||
}
|
||||
|
||||
const locatedBlocks: Block[] = [];
|
||||
const locatedArrays: { [path: string]: ArrayField } = {};
|
||||
const locatedBlocks: Block[] = []
|
||||
const locatedArrays: { [path: string]: ArrayField } = {}
|
||||
|
||||
traverseFields({
|
||||
_locales,
|
||||
adapter,
|
||||
currentArgs: result,
|
||||
currentTableName: tableName,
|
||||
@@ -60,7 +62,7 @@ export const buildFindManyArgs = ({
|
||||
path: '',
|
||||
topLevelArgs: result,
|
||||
topLevelTableName: tableName,
|
||||
});
|
||||
})
|
||||
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import { SanitizedConfig } from 'payload/config';
|
||||
import { buildFindManyArgs } from './buildFindManyArgs';
|
||||
import { PostgresAdapter } from '../types';
|
||||
import type { SanitizedConfig } from 'payload/config'
|
||||
|
||||
import type { PostgresAdapter } from '../types'
|
||||
|
||||
import { buildFindManyArgs } from './buildFindManyArgs'
|
||||
|
||||
type BuildWithFromDepthArgs = {
|
||||
adapter: PostgresAdapter
|
||||
config: SanitizedConfig
|
||||
depth: number
|
||||
fallbackLocale?: string | false
|
||||
fallbackLocale?: false | string
|
||||
locale?: string
|
||||
}
|
||||
|
||||
@@ -19,23 +21,23 @@ export const buildWithFromDepth = ({
|
||||
locale,
|
||||
}: BuildWithFromDepthArgs): Record<string, unknown> | undefined => {
|
||||
const result = config.collections.reduce((slugs, coll) => {
|
||||
const { slug } = coll;
|
||||
const { slug } = coll
|
||||
|
||||
if (depth >= 1) {
|
||||
const args = buildFindManyArgs({
|
||||
adapter,
|
||||
config,
|
||||
collection: coll,
|
||||
config,
|
||||
depth: depth - 1,
|
||||
fallbackLocale,
|
||||
locale,
|
||||
});
|
||||
})
|
||||
|
||||
slugs[`${slug}ID`] = args;
|
||||
slugs[`${slug}ID`] = args
|
||||
}
|
||||
|
||||
return slugs;
|
||||
}, {});
|
||||
return slugs
|
||||
}, {})
|
||||
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -12,20 +12,26 @@ type TraverseFieldArgs = {
|
||||
depth?: number,
|
||||
fields: Field[]
|
||||
_locales: Record<string, unknown>
|
||||
locatedArrays: { [path: string]: ArrayField },
|
||||
locatedBlocks: Block[],
|
||||
path: string,
|
||||
topLevelArgs: Record<string, unknown>,
|
||||
adapter: PostgresAdapter
|
||||
config: SanitizedConfig
|
||||
currentArgs: Record<string, unknown>
|
||||
currentTableName: string
|
||||
depth?: number
|
||||
fields: Field[]
|
||||
locatedArrays: { [path: string]: ArrayField }
|
||||
locatedBlocks: Block[]
|
||||
path: string
|
||||
topLevelArgs: Record<string, unknown>
|
||||
topLevelTableName: string
|
||||
}
|
||||
|
||||
export const traverseFields = ({
|
||||
_locales,
|
||||
adapter,
|
||||
currentArgs,
|
||||
currentTableName,
|
||||
depth,
|
||||
fields,
|
||||
_locales,
|
||||
locatedArrays,
|
||||
locatedBlocks,
|
||||
path,
|
||||
@@ -37,39 +43,39 @@ export const traverseFields = ({
|
||||
switch (field.type) {
|
||||
case 'array': {
|
||||
const withArray: Result = {
|
||||
orderBy: ({ _order }, { asc }) => [asc(_order)],
|
||||
columns: {
|
||||
_parentID: false,
|
||||
_order: false,
|
||||
_parentID: false,
|
||||
},
|
||||
orderBy: ({ _order }, { asc }) => [asc(_order)],
|
||||
with: {},
|
||||
};
|
||||
}
|
||||
|
||||
const arrayTableName = `${currentTableName}_${toSnakeCase(field.name)}`;
|
||||
const arrayTableName = `${currentTableName}_${toSnakeCase(field.name)}`
|
||||
|
||||
if (adapter.tables[`${arrayTableName}_locales`]) withArray.with._locales = _locales;
|
||||
currentArgs.with[`${path}${field.name}`] = withArray;
|
||||
if (adapter.tables[`${arrayTableName}_locales`]) withArray.with._locales = _locales
|
||||
currentArgs.with[`${path}${field.name}`] = withArray
|
||||
|
||||
traverseFields({
|
||||
_locales,
|
||||
adapter,
|
||||
currentArgs: withArray,
|
||||
currentTableName: arrayTableName,
|
||||
depth,
|
||||
fields: field.fields,
|
||||
_locales,
|
||||
locatedArrays,
|
||||
locatedBlocks,
|
||||
path: '',
|
||||
topLevelArgs,
|
||||
topLevelTableName,
|
||||
});
|
||||
})
|
||||
|
||||
break;
|
||||
break
|
||||
}
|
||||
|
||||
case 'blocks':
|
||||
field.blocks.forEach((block) => {
|
||||
const blockKey = `_blocks_${block.slug}`;
|
||||
const blockKey = `_blocks_${block.slug}`
|
||||
|
||||
if (!topLevelArgs[blockKey]) {
|
||||
const withBlock: Result = {
|
||||
@@ -78,52 +84,52 @@ export const traverseFields = ({
|
||||
},
|
||||
orderBy: ({ _order }, { asc }) => [asc(_order)],
|
||||
with: {},
|
||||
};
|
||||
}
|
||||
|
||||
if (adapter.tables[`${topLevelTableName}_${toSnakeCase(block.slug)}_locales`]) withBlock.with._locales = _locales;
|
||||
topLevelArgs.with[blockKey] = withBlock;
|
||||
|
||||
traverseFields({
|
||||
_locales,
|
||||
adapter,
|
||||
currentArgs: withBlock,
|
||||
currentTableName,
|
||||
depth,
|
||||
fields: block.fields,
|
||||
_locales,
|
||||
locatedArrays,
|
||||
locatedBlocks,
|
||||
path,
|
||||
topLevelArgs,
|
||||
topLevelTableName,
|
||||
});
|
||||
})
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
break;
|
||||
break
|
||||
|
||||
case 'group':
|
||||
traverseFields({
|
||||
_locales,
|
||||
adapter,
|
||||
currentArgs,
|
||||
currentTableName,
|
||||
depth,
|
||||
fields: field.fields,
|
||||
_locales,
|
||||
locatedArrays,
|
||||
locatedBlocks,
|
||||
path: `${path}${field.name}_`,
|
||||
topLevelArgs,
|
||||
topLevelTableName,
|
||||
});
|
||||
})
|
||||
|
||||
break;
|
||||
break
|
||||
|
||||
default: {
|
||||
break;
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
return topLevelArgs;
|
||||
};
|
||||
return topLevelArgs
|
||||
}
|
||||
|
||||
@@ -1,19 +1,22 @@
|
||||
import toSnakeCase from 'to-snake-case';
|
||||
import type { FindOne } from 'payload/dist/database/types';
|
||||
import type { PayloadRequest } from 'payload/dist/express/types';
|
||||
import type { SanitizedCollectionConfig } from 'payload/dist/collections/config/types';
|
||||
import buildQuery from './queries/buildQuery';
|
||||
import { buildFindManyArgs } from './find/buildFindManyArgs';
|
||||
import { transform } from './transform/read';
|
||||
import type { FindOne } from 'payload/database'
|
||||
import type { SanitizedCollectionConfig } from 'payload/types'
|
||||
import type { PayloadRequest } from 'payload/types'
|
||||
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import { buildFindManyArgs } from './find/buildFindManyArgs'
|
||||
import buildQuery from './queries/buildQuery'
|
||||
import { transform } from './transform/read'
|
||||
|
||||
export const findOne: FindOne = async function findOne({
|
||||
collection,
|
||||
where: incomingWhere,
|
||||
locale,
|
||||
req = {} as PayloadRequest,
|
||||
where,
|
||||
}) {
|
||||
const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config;
|
||||
const tableName = toSnakeCase(collection);
|
||||
const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config
|
||||
const tableName = toSnakeCase(collection)
|
||||
|
||||
const { where } = await buildQuery({
|
||||
adapter: this,
|
||||
@@ -32,11 +35,12 @@ export const findOne: FindOne = async function findOne({
|
||||
|
||||
findManyArgs.where = where;
|
||||
|
||||
const doc = await this.db.query[tableName].findFirst(findManyArgs);
|
||||
const doc = await this.db.query[tableName].findFirst(findManyArgs)
|
||||
|
||||
return transform({
|
||||
config: this.payload.config,
|
||||
data: doc,
|
||||
fallbackLocale: req.fallbackLocale,
|
||||
fields: collectionConfig.fields,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -31,16 +31,17 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
// @ts-expect-error
|
||||
return createDatabaseAdapter<PostgresAdapter>({
|
||||
...args,
|
||||
enums: {},
|
||||
relations: {},
|
||||
tables: {},
|
||||
payload,
|
||||
connect,
|
||||
create,
|
||||
createMigration,
|
||||
db: undefined,
|
||||
enums: {},
|
||||
find,
|
||||
// queryDrafts,
|
||||
findOne,
|
||||
// destroy,
|
||||
init,
|
||||
webpack,
|
||||
createMigration,
|
||||
payload,
|
||||
// beginTransaction,
|
||||
// rollbackTransaction,
|
||||
// commitTransaction,
|
||||
@@ -59,8 +60,8 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
createVersion,
|
||||
// updateVersion,
|
||||
// deleteVersions,
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
return adapter;
|
||||
return adapter
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
exports.postgresAdapter = () => ({});
|
||||
exports.postgresAdapter = () => ({})
|
||||
|
||||
@@ -9,7 +9,7 @@ export async function buildAndOrConditions({
|
||||
joins,
|
||||
where,
|
||||
adapter,
|
||||
locale,
|
||||
collectionSlug,
|
||||
fields,
|
||||
tableName,
|
||||
selectFields,
|
||||
@@ -24,7 +24,7 @@ export async function buildAndOrConditions({
|
||||
tableName: string,
|
||||
selectFields: Record<string, GenericColumn>
|
||||
}): Promise<SQL[]> {
|
||||
const completedConditions = [];
|
||||
const completedConditions = []
|
||||
// Loop over all AND / OR operations and add them to the AND / OR query param
|
||||
// Operations should come through as an array
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
@@ -36,7 +36,7 @@ export async function buildAndOrConditions({
|
||||
joins,
|
||||
where: condition,
|
||||
adapter,
|
||||
locale,
|
||||
collectionSlug,
|
||||
fields,
|
||||
tableName,
|
||||
selectFields,
|
||||
@@ -46,5 +46,5 @@ export async function buildAndOrConditions({
|
||||
}
|
||||
}
|
||||
}
|
||||
return completedConditions;
|
||||
return completedConditions
|
||||
}
|
||||
|
||||
@@ -96,4 +96,4 @@ const buildQuery = async function buildQuery({
|
||||
};
|
||||
};
|
||||
|
||||
export default buildQuery;
|
||||
export default buildQuery
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
import { and, eq, gt, gte, ilike, inArray, isNotNull, isNull, lt, lte, ne, notInArray, or } from 'drizzle-orm';
|
||||
|
||||
export const operatorMap = {
|
||||
greater_than_equal: gte,
|
||||
less_than_equal: lte,
|
||||
less_than: lt,
|
||||
// near: near,
|
||||
and,
|
||||
equals: eq,
|
||||
// TODO: isNotNull isn't right as it depends on if the query value is true or false
|
||||
exists: isNotNull,
|
||||
greater_than: gt,
|
||||
greater_than_equal: gte,
|
||||
// TODO:
|
||||
in: inArray,
|
||||
like: ilike,
|
||||
// TODO:
|
||||
@@ -20,4 +24,4 @@ export const operatorMap = {
|
||||
// intersects: intersects,
|
||||
and,
|
||||
or,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { Operator, Where } from 'payload/types';
|
||||
import { Field } from 'payload/dist/fields/config/types';
|
||||
@@ -25,7 +26,7 @@ export async function parseParams({
|
||||
joins,
|
||||
where,
|
||||
adapter,
|
||||
locale,
|
||||
collectionSlug,
|
||||
fields,
|
||||
tableName,
|
||||
selectFields,
|
||||
@@ -118,5 +119,5 @@ export async function parseParams({
|
||||
[result] = constraints;
|
||||
}
|
||||
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -4,40 +4,37 @@
|
||||
|
||||
// type PushDiff = (schema: DrizzleSchemaExports) => Promise<{ warnings: string[], apply: () => Promise<void> }>
|
||||
|
||||
|
||||
// drizzle-kit@utils
|
||||
|
||||
import { generateDrizzleJson, generateMigration, pushSchema } from 'drizzle-kit/utils';
|
||||
import { drizzle } from 'drizzle-orm/node-postgres';
|
||||
import { Pool } from 'pg';
|
||||
import { generateDrizzleJson, generateMigration, pushSchema } from 'drizzle-kit/utils'
|
||||
import { drizzle } from 'drizzle-orm/node-postgres'
|
||||
import { Pool } from 'pg'
|
||||
|
||||
async function generateUsage() {
|
||||
const schema = await import('./data/users');
|
||||
const schemaAfter = await import('./data/users-after');
|
||||
const schema = await import('./data/users')
|
||||
const schemaAfter = await import('./data/users-after')
|
||||
|
||||
const drizzleJsonBefore = generateDrizzleJson(schema);
|
||||
const drizzleJsonAfter = generateDrizzleJson(schemaAfter);
|
||||
const drizzleJsonBefore = generateDrizzleJson(schema)
|
||||
const drizzleJsonAfter = generateDrizzleJson(schemaAfter)
|
||||
|
||||
const sqlStatements = await generateMigration(drizzleJsonBefore, drizzleJsonAfter);
|
||||
const sqlStatements = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
|
||||
console.log(sqlStatements);
|
||||
console.log(sqlStatements)
|
||||
}
|
||||
|
||||
async function pushUsage() {
|
||||
const schemaAfter = await import('./data/users-after');
|
||||
const schemaAfter = await import('./data/users-after')
|
||||
|
||||
const db = drizzle(
|
||||
new Pool({ connectionString: '' }),
|
||||
);
|
||||
const db = drizzle(new Pool({ connectionString: '' }))
|
||||
|
||||
const response = await pushSchema(schemaAfter, db);
|
||||
const response = await pushSchema(schemaAfter, db)
|
||||
|
||||
console.log('\n');
|
||||
console.log('hasDataLoss: ', response.hasDataLoss);
|
||||
console.log('warnings: ', response.warnings);
|
||||
console.log('statements: ', response.statementsToExecute);
|
||||
console.log('\n')
|
||||
console.log('hasDataLoss: ', response.hasDataLoss)
|
||||
console.log('warnings: ', response.warnings)
|
||||
console.log('statements: ', response.statementsToExecute)
|
||||
|
||||
await response.apply();
|
||||
await response.apply()
|
||||
|
||||
process.exit(0);
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import type { Relation } from 'drizzle-orm'
|
||||
import type { AnyPgColumnBuilder, IndexBuilder } from 'drizzle-orm/pg-core'
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import { relations } from 'drizzle-orm'
|
||||
import {
|
||||
AnyPgColumnBuilder,
|
||||
index,
|
||||
integer,
|
||||
numeric,
|
||||
pgTable,
|
||||
serial,
|
||||
varchar,
|
||||
index,
|
||||
numeric,
|
||||
timestamp,
|
||||
IndexBuilder,
|
||||
unique,
|
||||
UniqueConstraintBuilder,
|
||||
} from 'drizzle-orm/pg-core';
|
||||
@@ -46,35 +48,35 @@ export const buildTable = ({
|
||||
const columns: Record<string, AnyPgColumnBuilder> = baseColumns;
|
||||
const indexes: Record<string, (cols: GenericColumns) => IndexBuilder> = {};
|
||||
|
||||
let hasLocalizedField = false;
|
||||
let hasLocalizedRelationshipField = false;
|
||||
const localesColumns: Record<string, AnyPgColumnBuilder> = {};
|
||||
const localesIndexes: Record<string, (cols: GenericColumns) => IndexBuilder> = {};
|
||||
let localesTable: GenericTable;
|
||||
let hasLocalizedField = false
|
||||
let hasLocalizedRelationshipField = false
|
||||
const localesColumns: Record<string, AnyPgColumnBuilder> = {}
|
||||
const localesIndexes: Record<string, (cols: GenericColumns) => IndexBuilder> = {}
|
||||
let localesTable: GenericTable
|
||||
|
||||
const relationships: Set<string> = new Set();
|
||||
let relationshipsTable: GenericTable;
|
||||
const relationships: Set<string> = new Set()
|
||||
let relationshipsTable: GenericTable
|
||||
|
||||
const arrayBlockRelations: Map<string, string> = new Map();
|
||||
const arrayBlockRelations: Map<string, string> = new Map()
|
||||
|
||||
const idField = fields.find((field) => fieldAffectsData(field) && field.name === 'id');
|
||||
let idColType = 'integer';
|
||||
const idField = fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
let idColType = 'integer'
|
||||
|
||||
if (idField) {
|
||||
if (idField.type === 'number') {
|
||||
idColType = 'numeric';
|
||||
columns.id = numeric('id').primaryKey();
|
||||
idColType = 'numeric'
|
||||
columns.id = numeric('id').primaryKey()
|
||||
}
|
||||
|
||||
if (idField.type === 'text') {
|
||||
idColType = 'varchar';
|
||||
columns.id = varchar('id').primaryKey();
|
||||
idColType = 'varchar'
|
||||
columns.id = varchar('id').primaryKey()
|
||||
}
|
||||
} else {
|
||||
columns.id = serial('id').primaryKey();
|
||||
columns.id = serial('id').primaryKey()
|
||||
}
|
||||
|
||||
({ hasLocalizedField, hasLocalizedRelationshipField } = traverseFields({
|
||||
;({ hasLocalizedField, hasLocalizedRelationshipField } = traverseFields({
|
||||
adapter,
|
||||
arrayBlockRelations,
|
||||
buildRelationships,
|
||||
@@ -86,11 +88,11 @@ export const buildTable = ({
|
||||
newTableName: tableName,
|
||||
parentTableName: tableName,
|
||||
relationships,
|
||||
}));
|
||||
}))
|
||||
|
||||
if (timestamps) {
|
||||
columns.createdAt = timestamp('created_at').defaultNow().notNull();
|
||||
columns.updatedAt = timestamp('updated_at').defaultNow().notNull();
|
||||
columns.createdAt = timestamp('created_at').defaultNow().notNull()
|
||||
columns.updatedAt = timestamp('updated_at').defaultNow().notNull()
|
||||
}
|
||||
|
||||
const table = pgTable(tableName, columns, (cols) => {
|
||||
@@ -114,24 +116,27 @@ export const buildTable = ({
|
||||
localesColumns._parentID = parentIDColumnMap[idColType]('_parent_id').references(() => table.id, { onDelete: 'cascade' }).notNull();
|
||||
|
||||
localesTable = pgTable(localeTableName, localesColumns, (cols) => {
|
||||
return Object.entries(localesIndexes).reduce((acc, [colName, func]) => {
|
||||
acc[colName] = func(cols);
|
||||
return acc;
|
||||
}, {
|
||||
_localeParent: unique().on(cols._locale, cols._parentID),
|
||||
});
|
||||
});
|
||||
return Object.entries(localesIndexes).reduce(
|
||||
(acc, [colName, func]) => {
|
||||
acc[colName] = func(cols)
|
||||
return acc
|
||||
},
|
||||
{
|
||||
_localeParent: unique().on(cols._locale, cols._parentID),
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
adapter.tables[localeTableName] = localesTable;
|
||||
adapter.tables[localeTableName] = localesTable
|
||||
|
||||
const localesTableRelations = relations(localesTable, ({ one }) => ({
|
||||
_parentID: one(table, {
|
||||
fields: [localesTable._parentID],
|
||||
references: [table.id],
|
||||
}),
|
||||
}));
|
||||
}))
|
||||
|
||||
adapter.relations[`relations_${localeTableName}`] = localesTableRelations;
|
||||
adapter.relations[`relations_${localeTableName}`] = localesTableRelations
|
||||
}
|
||||
|
||||
if (buildRelationships) {
|
||||
@@ -141,21 +146,29 @@ export const buildTable = ({
|
||||
parent: parentIDColumnMap[idColType]('parent_id').references(() => table.id, { onDelete: 'cascade' }).notNull(),
|
||||
path: varchar('path').notNull(),
|
||||
order: integer('order'),
|
||||
};
|
||||
parent: parentIDColumnMap[idColType]('parent_id')
|
||||
.references(() => table.id)
|
||||
.notNull(),
|
||||
path: varchar('path').notNull(),
|
||||
}
|
||||
|
||||
if (hasLocalizedRelationshipField) {
|
||||
relationshipColumns.locale = adapter.enums._locales('locale');
|
||||
}
|
||||
|
||||
relationships.forEach((relationTo) => {
|
||||
const formattedRelationTo = toSnakeCase(relationTo);
|
||||
let colType = 'integer';
|
||||
const relatedCollectionCustomID = adapter.payload.collections[relationTo].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id');
|
||||
if (relatedCollectionCustomID?.type === 'number') colType = 'numeric';
|
||||
if (relatedCollectionCustomID?.type === 'text') colType = 'varchar';
|
||||
const formattedRelationTo = toSnakeCase(relationTo)
|
||||
let colType = 'integer'
|
||||
const relatedCollectionCustomID = adapter.payload.collections[
|
||||
relationTo
|
||||
].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
if (relatedCollectionCustomID?.type === 'number') colType = 'numeric'
|
||||
if (relatedCollectionCustomID?.type === 'text') colType = 'varchar'
|
||||
|
||||
relationshipColumns[`${relationTo}ID`] = parentIDColumnMap[colType](`${formattedRelationTo}_id`).references(() => adapter.tables[formattedRelationTo].id);
|
||||
});
|
||||
relationshipColumns[`${relationTo}ID`] = parentIDColumnMap[colType](
|
||||
`${formattedRelationTo}_id`,
|
||||
).references(() => adapter.tables[formattedRelationTo].id)
|
||||
})
|
||||
|
||||
const relationshipsTableName = `${tableName}_relationships`;
|
||||
|
||||
@@ -172,54 +185,54 @@ export const buildTable = ({
|
||||
return result;
|
||||
});
|
||||
|
||||
adapter.tables[relationshipsTableName] = relationshipsTable;
|
||||
adapter.tables[relationshipsTableName] = relationshipsTable
|
||||
|
||||
const relationshipsTableRelations = relations(relationshipsTable, ({ one }) => {
|
||||
const result: Record<string, Relation<string>> = {
|
||||
parent: one(table, {
|
||||
relationName: '_relationships',
|
||||
fields: [relationshipsTable.parent],
|
||||
references: [table.id],
|
||||
relationName: '_relationships',
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
relationships.forEach((relationTo) => {
|
||||
const relatedTableName = toSnakeCase(relationTo);
|
||||
const idColumnName = `${relationTo}ID`;
|
||||
const relatedTableName = toSnakeCase(relationTo)
|
||||
const idColumnName = `${relationTo}ID`
|
||||
result[idColumnName] = one(adapter.tables[relatedTableName], {
|
||||
fields: [relationshipsTable[idColumnName]],
|
||||
references: [adapter.tables[relatedTableName].id],
|
||||
});
|
||||
});
|
||||
})
|
||||
})
|
||||
|
||||
return result;
|
||||
});
|
||||
return result
|
||||
})
|
||||
|
||||
adapter.relations[`relations_${relationshipsTableName}`] = relationshipsTableRelations;
|
||||
adapter.relations[`relations_${relationshipsTableName}`] = relationshipsTableRelations
|
||||
}
|
||||
}
|
||||
|
||||
const tableRelations = relations(table, ({ many }) => {
|
||||
const result: Record<string, Relation<string>> = {};
|
||||
const result: Record<string, Relation<string>> = {}
|
||||
|
||||
arrayBlockRelations.forEach((val, key) => {
|
||||
result[key] = many(adapter.tables[val]);
|
||||
});
|
||||
result[key] = many(adapter.tables[val])
|
||||
})
|
||||
|
||||
if (hasLocalizedField) {
|
||||
result._locales = many(localesTable);
|
||||
result._locales = many(localesTable)
|
||||
}
|
||||
|
||||
if (relationships.size && relationshipsTable) {
|
||||
result._relationships = many(relationshipsTable, {
|
||||
relationName: '_relationships',
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
return result;
|
||||
});
|
||||
return result
|
||||
})
|
||||
|
||||
adapter.relations[`relations_${tableName}`] = tableRelations;
|
||||
|
||||
return { arrayBlockRelations };
|
||||
};
|
||||
return { arrayBlockRelations }
|
||||
}
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import { uniqueIndex, index } from 'drizzle-orm/pg-core';
|
||||
import { GenericColumn } from '../types';
|
||||
import { index, uniqueIndex } from 'drizzle-orm/pg-core'
|
||||
|
||||
import type { GenericColumn } from '../types'
|
||||
|
||||
type CreateIndexArgs = {
|
||||
name: string
|
||||
columnName: string
|
||||
name: string
|
||||
unique?: boolean
|
||||
}
|
||||
|
||||
export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
|
||||
export const createIndex = ({ columnName, name, unique }: CreateIndexArgs) => {
|
||||
return (table: { [x: string]: GenericColumn }) => {
|
||||
if (unique) return uniqueIndex(`${columnName}_idx`).on(table[name]);
|
||||
return index(`${columnName}_idx`).on(table[name]);
|
||||
};
|
||||
};
|
||||
if (unique) return uniqueIndex(`${columnName}_idx`).on(table[name])
|
||||
return index(`${columnName}_idx`).on(table[name])
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { integer, numeric, varchar } from 'drizzle-orm/pg-core';
|
||||
import { integer, numeric, varchar } from 'drizzle-orm/pg-core'
|
||||
|
||||
export const parentIDColumnMap = {
|
||||
integer,
|
||||
varchar,
|
||||
numeric,
|
||||
};
|
||||
varchar,
|
||||
}
|
||||
|
||||
@@ -26,8 +26,8 @@ type Args = {
|
||||
adapter: PostgresAdapter
|
||||
arrayBlockRelations: Map<string, string>
|
||||
buildRelationships: boolean
|
||||
columns: Record<string, AnyPgColumnBuilder>
|
||||
columnPrefix?: string
|
||||
columns: Record<string, AnyPgColumnBuilder>
|
||||
fieldPrefix?: string
|
||||
fields: Field[]
|
||||
forceLocalized?: boolean
|
||||
@@ -60,22 +60,22 @@ export const traverseFields = ({
|
||||
parentTableName,
|
||||
relationships,
|
||||
}: Args): Result => {
|
||||
let hasLocalizedField = false;
|
||||
let hasLocalizedRelationshipField = false;
|
||||
let hasLocalizedField = false
|
||||
let hasLocalizedRelationshipField = false
|
||||
|
||||
let parentIDColType = 'integer';
|
||||
if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric';
|
||||
if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar';
|
||||
let parentIDColType = 'integer'
|
||||
if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric'
|
||||
if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar'
|
||||
|
||||
fields.forEach((field) => {
|
||||
if ('name' in field && field.name === 'id') return;
|
||||
let columnName: string;
|
||||
if ('name' in field && field.name === 'id') return
|
||||
let columnName: string
|
||||
|
||||
let targetTable = columns;
|
||||
let targetIndexes = indexes;
|
||||
let targetTable = columns
|
||||
let targetIndexes = indexes
|
||||
|
||||
if (fieldAffectsData(field)) {
|
||||
columnName = `${columnPrefix || ''}${toSnakeCase(field.name)}`;
|
||||
columnName = `${columnPrefix || ''}${toSnakeCase(field.name)}`
|
||||
|
||||
// If field is localized,
|
||||
// add the column to the locale table instead of main table
|
||||
@@ -97,37 +97,37 @@ export const traverseFields = ({
|
||||
case 'textarea': {
|
||||
// TODO: handle hasMany
|
||||
// TODO: handle min / max length
|
||||
targetTable[`${fieldPrefix || ''}${field.name}`] = varchar(columnName);
|
||||
break;
|
||||
targetTable[`${fieldPrefix || ''}${field.name}`] = varchar(columnName)
|
||||
break
|
||||
}
|
||||
|
||||
case 'number': {
|
||||
// TODO: handle hasMany
|
||||
// TODO: handle min / max
|
||||
targetTable[`${fieldPrefix || ''}${field.name}`] = numeric(columnName);
|
||||
break;
|
||||
targetTable[`${fieldPrefix || ''}${field.name}`] = numeric(columnName)
|
||||
break
|
||||
}
|
||||
|
||||
case 'richText':
|
||||
case 'json': {
|
||||
targetTable[`${fieldPrefix || ''}${field.name}`] = jsonb(columnName);
|
||||
break;
|
||||
targetTable[`${fieldPrefix || ''}${field.name}`] = jsonb(columnName)
|
||||
break
|
||||
}
|
||||
|
||||
case 'date': {
|
||||
break;
|
||||
break
|
||||
}
|
||||
|
||||
case 'point': {
|
||||
break;
|
||||
break
|
||||
}
|
||||
|
||||
case 'radio': {
|
||||
break;
|
||||
break
|
||||
}
|
||||
|
||||
case 'select': {
|
||||
break;
|
||||
break
|
||||
}
|
||||
|
||||
case 'array': {
|
||||
@@ -145,7 +145,11 @@ export const traverseFields = ({
|
||||
baseExtraConfig._parentOrder = (cols) => unique().on(cols._parentID, cols._order);
|
||||
}
|
||||
|
||||
const arrayTableName = `${newTableName}_${toSnakeCase(field.name)}`;
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
baseColumns._locale = adapter.enums._locales('_locale').notNull()
|
||||
}
|
||||
|
||||
const arrayTableName = `${newTableName}_${toSnakeCase(field.name)}`
|
||||
|
||||
const { arrayBlockRelations: subArrayBlockRelations } = buildTable({
|
||||
adapter,
|
||||
@@ -153,9 +157,9 @@ export const traverseFields = ({
|
||||
baseExtraConfig,
|
||||
fields: field.fields,
|
||||
tableName: arrayTableName,
|
||||
});
|
||||
})
|
||||
|
||||
arrayBlockRelations.set(`${fieldPrefix || ''}${field.name}`, arrayTableName);
|
||||
arrayBlockRelations.set(`${fieldPrefix || ''}${field.name}`, arrayTableName)
|
||||
|
||||
const arrayTableRelations = relations(adapter.tables[arrayTableName], ({ many, one }) => {
|
||||
const result: Record<string, Relation<string>> = {
|
||||
@@ -163,22 +167,22 @@ export const traverseFields = ({
|
||||
fields: [adapter.tables[arrayTableName]._parentID],
|
||||
references: [adapter.tables[parentTableName].id],
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
if (hasLocalesTable(field.fields)) {
|
||||
result._locales = many(adapter.tables[`${arrayTableName}_locales`]);
|
||||
result._locales = many(adapter.tables[`${arrayTableName}_locales`])
|
||||
}
|
||||
|
||||
subArrayBlockRelations.forEach((val, key) => {
|
||||
result[key] = many(adapter.tables[val]);
|
||||
});
|
||||
result[key] = many(adapter.tables[val])
|
||||
})
|
||||
|
||||
return result;
|
||||
});
|
||||
return result
|
||||
})
|
||||
|
||||
adapter.relations[`relations_${arrayTableName}`] = arrayTableRelations;
|
||||
adapter.relations[`relations_${arrayTableName}`] = arrayTableRelations
|
||||
|
||||
break;
|
||||
break
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
@@ -206,34 +210,37 @@ export const traverseFields = ({
|
||||
baseExtraConfig,
|
||||
fields: block.fields,
|
||||
tableName: blockTableName,
|
||||
});
|
||||
})
|
||||
|
||||
const blockTableRelations = relations(adapter.tables[blockTableName], ({ many, one }) => {
|
||||
const result: Record<string, Relation<string>> = {
|
||||
_parentID: one(adapter.tables[parentTableName], {
|
||||
fields: [adapter.tables[blockTableName]._parentID],
|
||||
references: [adapter.tables[parentTableName].id],
|
||||
}),
|
||||
};
|
||||
const blockTableRelations = relations(
|
||||
adapter.tables[blockTableName],
|
||||
({ many, one }) => {
|
||||
const result: Record<string, Relation<string>> = {
|
||||
_parentID: one(adapter.tables[parentTableName], {
|
||||
fields: [adapter.tables[blockTableName]._parentID],
|
||||
references: [adapter.tables[parentTableName].id],
|
||||
}),
|
||||
}
|
||||
|
||||
if (hasLocalesTable(block.fields)) {
|
||||
result._locales = many(adapter.tables[`${blockTableName}_locales`]);
|
||||
}
|
||||
if (hasLocalesTable(block.fields)) {
|
||||
result._locales = many(adapter.tables[`${blockTableName}_locales`])
|
||||
}
|
||||
|
||||
subArrayBlockRelations.forEach((val, key) => {
|
||||
result[key] = many(adapter.tables[val]);
|
||||
});
|
||||
subArrayBlockRelations.forEach((val, key) => {
|
||||
result[key] = many(adapter.tables[val])
|
||||
})
|
||||
|
||||
return result;
|
||||
});
|
||||
return result
|
||||
},
|
||||
)
|
||||
|
||||
adapter.relations[`relations_${blockTableName}`] = blockTableRelations;
|
||||
adapter.relations[`relations_${blockTableName}`] = blockTableRelations
|
||||
}
|
||||
|
||||
arrayBlockRelations.set(`_blocks_${block.slug}`, blockTableName);
|
||||
});
|
||||
arrayBlockRelations.set(`_blocks_${block.slug}`, blockTableName)
|
||||
})
|
||||
|
||||
break;
|
||||
break
|
||||
}
|
||||
|
||||
case 'group': {
|
||||
@@ -255,11 +262,11 @@ export const traverseFields = ({
|
||||
newTableName: `${parentTableName}_${toSnakeCase(field.name)}`,
|
||||
parentTableName,
|
||||
relationships,
|
||||
});
|
||||
})
|
||||
|
||||
if (groupHasLocalizedField) hasLocalizedField = true;
|
||||
if (groupHasLocalizedRelationshipField) hasLocalizedRelationshipField = true;
|
||||
break;
|
||||
if (groupHasLocalizedField) hasLocalizedField = true
|
||||
if (groupHasLocalizedRelationshipField) hasLocalizedRelationshipField = true
|
||||
break
|
||||
}
|
||||
|
||||
case 'tabs': {
|
||||
@@ -282,15 +289,12 @@ export const traverseFields = ({
|
||||
newTableName: `${parentTableName}_${toSnakeCase(tab.name)}`,
|
||||
parentTableName,
|
||||
relationships,
|
||||
});
|
||||
})
|
||||
|
||||
if (tabHasLocalizedField) hasLocalizedField = true;
|
||||
if (tabHasLocalizedRelationshipField) hasLocalizedRelationshipField = true;
|
||||
if (tabHasLocalizedField) hasLocalizedField = true
|
||||
if (tabHasLocalizedRelationshipField) hasLocalizedRelationshipField = true
|
||||
} else {
|
||||
({
|
||||
hasLocalizedField,
|
||||
hasLocalizedRelationshipField,
|
||||
} = traverseFields({
|
||||
;({ hasLocalizedField, hasLocalizedRelationshipField } = traverseFields({
|
||||
adapter,
|
||||
arrayBlockRelations,
|
||||
buildRelationships,
|
||||
@@ -302,18 +306,15 @@ export const traverseFields = ({
|
||||
newTableName: parentTableName,
|
||||
parentTableName,
|
||||
relationships,
|
||||
}));
|
||||
}))
|
||||
}
|
||||
});
|
||||
break;
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
case 'row':
|
||||
case 'collapsible': {
|
||||
({
|
||||
hasLocalizedField,
|
||||
hasLocalizedRelationshipField,
|
||||
} = traverseFields({
|
||||
;({ hasLocalizedField, hasLocalizedRelationshipField } = traverseFields({
|
||||
adapter,
|
||||
arrayBlockRelations,
|
||||
buildRelationships,
|
||||
@@ -325,27 +326,27 @@ export const traverseFields = ({
|
||||
newTableName: parentTableName,
|
||||
parentTableName,
|
||||
relationships,
|
||||
}));
|
||||
break;
|
||||
}))
|
||||
break
|
||||
}
|
||||
|
||||
case 'relationship':
|
||||
case 'upload':
|
||||
if (Array.isArray(field.relationTo)) {
|
||||
field.relationTo.forEach((relation) => relationships.add(relation));
|
||||
field.relationTo.forEach((relation) => relationships.add(relation))
|
||||
} else {
|
||||
relationships.add(field.relationTo);
|
||||
relationships.add(field.relationTo)
|
||||
}
|
||||
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
hasLocalizedRelationshipField = true;
|
||||
}
|
||||
break;
|
||||
break
|
||||
|
||||
default:
|
||||
break;
|
||||
break
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
return { hasLocalizedField, hasLocalizedRelationshipField };
|
||||
};
|
||||
return { hasLocalizedField, hasLocalizedRelationshipField }
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ import { createBlocksMap } from '../../utilities/createBlocksMap';
|
||||
type TransformArgs = {
|
||||
config: SanitizedConfig
|
||||
data: Record<string, unknown>
|
||||
fallbackLocale?: string | false
|
||||
fallbackLocale?: false | string
|
||||
fields: Field[]
|
||||
locale?: string
|
||||
}
|
||||
@@ -21,14 +21,14 @@ export const transform = <T extends TypeWithID>({
|
||||
data,
|
||||
fields,
|
||||
}: TransformArgs): T => {
|
||||
let relationships: Record<string, Record<string, unknown>[]> = {};
|
||||
let relationships: Record<string, Record<string, unknown>[]> = {}
|
||||
|
||||
if ('_relationships' in data) {
|
||||
relationships = createRelationshipMap(data._relationships);
|
||||
delete data._relationships;
|
||||
relationships = createRelationshipMap(data._relationships)
|
||||
delete data._relationships
|
||||
}
|
||||
|
||||
const blocks = createBlocksMap(data);
|
||||
const blocks = createBlocksMap(data)
|
||||
|
||||
const result = traverseFields<T>({
|
||||
blocks,
|
||||
|
||||
@@ -55,7 +55,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
siblingData,
|
||||
table,
|
||||
}: TraverseFieldsArgs): T => {
|
||||
const sanitizedPath = path ? `${path}.` : path;
|
||||
const sanitizedPath = path ? `${path}.` : path
|
||||
|
||||
const formatted = fields.reduce((result, field) => {
|
||||
if (fieldAffectsData(field)) {
|
||||
@@ -160,8 +160,8 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
});
|
||||
}
|
||||
|
||||
return {};
|
||||
});
|
||||
return {}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -181,7 +181,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
if (!relationsByLocale[row.locale]) relationsByLocale[row.locale] = [];
|
||||
relationsByLocale[row.locale].push(row);
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
Object.entries(relationsByLocale).forEach(([locale, relations]) => {
|
||||
transformRelationship({
|
||||
@@ -233,7 +233,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
delete table[subFieldKey];
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
if (field.localized) {
|
||||
Object.entries(ref).forEach(([groupLocale, groupLocaleData]) => {
|
||||
@@ -310,11 +310,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
result[field.name] = localizedFieldData;
|
||||
}
|
||||
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
|
||||
return siblingData;
|
||||
}, siblingData);
|
||||
return siblingData
|
||||
}, siblingData)
|
||||
|
||||
return formatted as T;
|
||||
};
|
||||
return formatted as T
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import { Field } from 'payload/types';
|
||||
import { traverseFields } from './traverseFields';
|
||||
import { RowToInsert } from './types';
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import type { RowToInsert } from './types'
|
||||
|
||||
import { traverseFields } from './traverseFields'
|
||||
|
||||
type Args = {
|
||||
data: Record<string, unknown>
|
||||
@@ -22,9 +24,8 @@ export const transformForWrite = ({
|
||||
row: {},
|
||||
locales: {},
|
||||
relationships: [],
|
||||
blocks: {},
|
||||
arrays: {},
|
||||
};
|
||||
row: {},
|
||||
}
|
||||
|
||||
// This function is responsible for building up the
|
||||
// above rowToInsert
|
||||
@@ -40,7 +41,7 @@ export const transformForWrite = ({
|
||||
path,
|
||||
relationships: rowToInsert.relationships,
|
||||
row: rowToInsert.row,
|
||||
});
|
||||
})
|
||||
|
||||
return rowToInsert;
|
||||
};
|
||||
return rowToInsert
|
||||
}
|
||||
|
||||
@@ -159,7 +159,7 @@ export const traverseFields = ({
|
||||
path: `${path || ''}${field.name}.`,
|
||||
relationships,
|
||||
row,
|
||||
});
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -320,5 +320,5 @@ export const traverseFields = ({
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
||||
})
|
||||
}
|
||||
|
||||
@@ -7,6 +7,9 @@ export type ArrayRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
columnName: string
|
||||
locale: Record<string, unknown>
|
||||
row: Record<string, unknown>
|
||||
}
|
||||
|
||||
export type BlockRowToInsert = {
|
||||
@@ -17,6 +20,8 @@ export type BlockRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
locale: Record<string, unknown>
|
||||
row: Record<string, unknown>
|
||||
}
|
||||
|
||||
export type RowToInsert = {
|
||||
@@ -31,4 +36,10 @@ export type RowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
blocks: {
|
||||
[blockType: string]: BlockRowToInsert[]
|
||||
}
|
||||
locale: Record<string, unknown>
|
||||
relationships: Record<string, unknown>[]
|
||||
row: Record<string, unknown>
|
||||
}
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
import { ColumnBaseConfig, ColumnDataType, Relation, Relations } from 'drizzle-orm';
|
||||
import { NodePgDatabase } from 'drizzle-orm/node-postgres';
|
||||
import { PgColumn, PgEnum, PgTableWithColumns } from 'drizzle-orm/pg-core';
|
||||
import { Payload } from 'payload';
|
||||
import { DatabaseAdapter } from 'payload/dist/database/types';
|
||||
import { ClientConfig, PoolConfig } from 'pg';
|
||||
import type { ColumnBaseConfig, ColumnDataType, Relation, Relations } from 'drizzle-orm'
|
||||
import type { NodePgDatabase } from 'drizzle-orm/node-postgres'
|
||||
import type { PgColumn, PgEnum, PgTableWithColumns } from 'drizzle-orm/pg-core'
|
||||
import type { Payload } from 'payload'
|
||||
import type { DatabaseAdapter } from 'payload/database'
|
||||
import type { ClientConfig, PoolConfig } from 'pg'
|
||||
|
||||
export type DrizzleDB = NodePgDatabase<Record<string, never>>
|
||||
|
||||
type BaseArgs = {
|
||||
migrationDir?: string;
|
||||
migrationName?: string;
|
||||
migrationDir?: string
|
||||
migrationName?: string
|
||||
}
|
||||
|
||||
type ClientArgs = {
|
||||
/** Client connection options for the Node package `pg` */
|
||||
client?: ClientConfig | string | false
|
||||
client?: ClientConfig | false | string
|
||||
} & BaseArgs
|
||||
|
||||
type PoolArgs = {
|
||||
@@ -24,26 +24,33 @@ type PoolArgs = {
|
||||
|
||||
export type Args = ClientArgs | PoolArgs
|
||||
|
||||
export type GenericColumn = PgColumn<ColumnBaseConfig<ColumnDataType, string>, Record<string, unknown>>
|
||||
export type GenericColumn = PgColumn<
|
||||
ColumnBaseConfig<ColumnDataType, string>,
|
||||
Record<string, unknown>
|
||||
>
|
||||
|
||||
export type GenericColumns = {
|
||||
[x: string]: GenericColumn
|
||||
}
|
||||
|
||||
export type GenericTable = PgTableWithColumns<{
|
||||
name: string, schema: undefined, columns: GenericColumns, dialect: string
|
||||
columns: GenericColumns
|
||||
dialect: string
|
||||
name: string
|
||||
schema: undefined
|
||||
}>
|
||||
|
||||
export type GenericEnum = PgEnum<[string, ...string[]]>
|
||||
|
||||
export type GenericRelation = Relations<string, Record<string, Relation<string>>>
|
||||
|
||||
export type PostgresAdapter = DatabaseAdapter & Args & {
|
||||
db: DrizzleDB
|
||||
enums: Record<string, GenericEnum>
|
||||
relations: Record<string, GenericRelation>
|
||||
tables: Record<string, GenericTable>
|
||||
schema: Record<string, GenericEnum | GenericTable | GenericRelation>
|
||||
}
|
||||
export type PostgresAdapter = DatabaseAdapter &
|
||||
Args & {
|
||||
db: DrizzleDB
|
||||
enums: Record<string, GenericEnum>
|
||||
relations: Record<string, GenericRelation>
|
||||
schema: Record<string, GenericEnum | GenericRelation | GenericTable>
|
||||
tables: Record<string, GenericTable>
|
||||
}
|
||||
|
||||
export type PostgresAdapterResult = (args: { payload: Payload }) => PostgresAdapter
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { UpdateOne } from 'payload/dist/database/types';
|
||||
import toSnakeCase from 'to-snake-case';
|
||||
import { SQL } from 'drizzle-orm';
|
||||
import buildQuery from '../queries/buildQuery';
|
||||
import { upsertRow } from '../upsertRow';
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { UpdateOne } from 'payload/database'
|
||||
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import buildQuery from '../queries/buildQuery'
|
||||
import { upsertRow } from '../upsertRow'
|
||||
|
||||
export const updateOne: UpdateOne = async function updateOne({
|
||||
collection: collectionSlug,
|
||||
@@ -13,9 +15,9 @@ export const updateOne: UpdateOne = async function updateOne({
|
||||
req,
|
||||
where,
|
||||
}) {
|
||||
const collection = this.payload.collections[collectionSlug].config;
|
||||
const collection = this.payload.collections[collectionSlug].config
|
||||
|
||||
let query: SQL<unknown>;
|
||||
let query: SQL<unknown>
|
||||
|
||||
if (where) {
|
||||
query = await buildQuery({
|
||||
@@ -23,7 +25,7 @@ export const updateOne: UpdateOne = async function updateOne({
|
||||
collectionSlug,
|
||||
locale,
|
||||
where,
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
const result = await upsertRow({
|
||||
@@ -35,7 +37,7 @@ export const updateOne: UpdateOne = async function updateOne({
|
||||
operation: 'update',
|
||||
tableName: toSnakeCase(collectionSlug),
|
||||
where: query,
|
||||
});
|
||||
})
|
||||
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -27,29 +27,33 @@ export const upsertRow = async ({
|
||||
fields,
|
||||
path,
|
||||
tableName,
|
||||
});
|
||||
})
|
||||
|
||||
// First, we insert the main row
|
||||
let insertedRow: Record<string, unknown>;
|
||||
let insertedRow: Record<string, unknown>
|
||||
|
||||
if (operation === 'update') {
|
||||
const target = upsertTarget || adapter.tables[tableName].id;
|
||||
const target = upsertTarget || adapter.tables[tableName].id
|
||||
|
||||
if (id) {
|
||||
rowToInsert.row.id = id;
|
||||
[insertedRow] = await adapter.db.insert(adapter.tables[tableName])
|
||||
rowToInsert.row.id = id
|
||||
;[insertedRow] = await adapter.db
|
||||
.insert(adapter.tables[tableName])
|
||||
.values(rowToInsert.row)
|
||||
.onConflictDoUpdate({ target, set: rowToInsert.row })
|
||||
.returning();
|
||||
.onConflictDoUpdate({ set: rowToInsert.row, target })
|
||||
.returning()
|
||||
} else {
|
||||
[insertedRow] = await adapter.db.insert(adapter.tables[tableName])
|
||||
;[insertedRow] = await adapter.db
|
||||
.insert(adapter.tables[tableName])
|
||||
.values(rowToInsert.row)
|
||||
.onConflictDoUpdate({ target, set: rowToInsert.row, where })
|
||||
.returning();
|
||||
.onConflictDoUpdate({ set: rowToInsert.row, target, where })
|
||||
.returning()
|
||||
}
|
||||
} else {
|
||||
[insertedRow] = await adapter.db.insert(adapter.tables[tableName])
|
||||
.values(rowToInsert.row).returning();
|
||||
;[insertedRow] = await adapter.db
|
||||
.insert(adapter.tables[tableName])
|
||||
.values(rowToInsert.row)
|
||||
.returning()
|
||||
}
|
||||
|
||||
const localesToInsert: Record<string, unknown>[] = [];
|
||||
@@ -58,7 +62,7 @@ export const upsertRow = async ({
|
||||
|
||||
// Maintain a list of promises to run locale, blocks, and relationships
|
||||
// all in parallel
|
||||
const promises = [];
|
||||
const promises = []
|
||||
|
||||
// If there are locale rows with data, add the parent and locale to each
|
||||
if (Object.keys(rowToInsert.locales).length > 0) {
|
||||
@@ -72,20 +76,20 @@ export const upsertRow = async ({
|
||||
// If there are relationships, add parent to each
|
||||
if (rowToInsert.relationships.length > 0) {
|
||||
rowToInsert.relationships.forEach((relation) => {
|
||||
relation.parent = insertedRow.id;
|
||||
relationsToInsert.push(relation);
|
||||
});
|
||||
relation.parent = insertedRow.id
|
||||
relationsToInsert.push(relation)
|
||||
})
|
||||
}
|
||||
|
||||
// If there are blocks, add parent to each, and then
|
||||
// store by table name and rows
|
||||
Object.keys(rowToInsert.blocks).forEach((blockName) => {
|
||||
rowToInsert.blocks[blockName].forEach((blockRow) => {
|
||||
blockRow.row._parentID = insertedRow.id;
|
||||
if (!blocksToInsert[blockName]) blocksToInsert[blockName] = [];
|
||||
blocksToInsert[blockName].push(blockRow);
|
||||
});
|
||||
});
|
||||
blockRow.row._parentID = insertedRow.id
|
||||
if (!blocksToInsert[blockName]) blocksToInsert[blockName] = []
|
||||
blocksToInsert[blockName].push(blockRow)
|
||||
})
|
||||
})
|
||||
|
||||
// //////////////////////////////////
|
||||
// INSERT LOCALES
|
||||
@@ -131,7 +135,7 @@ export const upsertRow = async ({
|
||||
// INSERT BLOCKS
|
||||
// //////////////////////////////////
|
||||
|
||||
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {};
|
||||
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}
|
||||
|
||||
Object.entries(blocksToInsert).forEach(([blockName, blockRows]) => {
|
||||
// For each block, push insert into promises to run parallel
|
||||
@@ -153,7 +157,7 @@ export const upsertRow = async ({
|
||||
blockRows[i].row = row;
|
||||
});
|
||||
|
||||
const blockLocaleIndexMap: number[] = [];
|
||||
const blockLocaleIndexMap: number[] = []
|
||||
|
||||
const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
|
||||
if (Object.entries(blockRow.locales).length > 0) {
|
||||
@@ -167,8 +171,8 @@ export const upsertRow = async ({
|
||||
});
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
return acc
|
||||
}, [])
|
||||
|
||||
if (blockLocaleRowsToInsert.length > 0) {
|
||||
await adapter.db.insert(adapter.tables[`${tableName}_${blockName}_locales`])
|
||||
@@ -179,9 +183,9 @@ export const upsertRow = async ({
|
||||
adapter,
|
||||
arrays: blockRows.map(({ arrays }) => arrays),
|
||||
parentRows: insertedBlockRows[blockName],
|
||||
});
|
||||
});
|
||||
});
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// //////////////////////////////////
|
||||
// INSERT ARRAYS RECURSIVELY
|
||||
@@ -202,10 +206,10 @@ export const upsertRow = async ({
|
||||
adapter,
|
||||
arrays: [rowToInsert.arrays],
|
||||
parentRows: [insertedRow],
|
||||
});
|
||||
});
|
||||
})
|
||||
})
|
||||
|
||||
await Promise.all(promises.map((promise) => promise()));
|
||||
await Promise.all(promises.map((promise) => promise()))
|
||||
|
||||
// //////////////////////////////////
|
||||
// RETRIEVE NEWLY UPDATED ROW
|
||||
@@ -232,5 +236,5 @@ export const upsertRow = async ({
|
||||
fields,
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@ export const insertArrays = async ({
|
||||
parentRows,
|
||||
}: Args): Promise<void> => {
|
||||
// Maintain a map of flattened rows by table
|
||||
const rowsByTable: RowsByTable = {};
|
||||
const rowsByTable: RowsByTable = {}
|
||||
|
||||
arrays.forEach((arraysByTable, parentRowIndex) => {
|
||||
Object.entries(arraysByTable).forEach(([tableName, arrayRows]) => {
|
||||
@@ -39,13 +39,13 @@ export const insertArrays = async ({
|
||||
};
|
||||
}
|
||||
|
||||
const parentID = parentRows[parentRowIndex].id;
|
||||
const parentID = parentRows[parentRowIndex].id
|
||||
|
||||
// Add any sub arrays that need to be created
|
||||
// We will call this recursively below
|
||||
arrayRows.forEach((arrayRow) => {
|
||||
if (Object.keys(arrayRow.arrays).length > 0) {
|
||||
rowsByTable[tableName].arrays.push(arrayRow.arrays);
|
||||
rowsByTable[tableName].arrays.push(arrayRow.arrays)
|
||||
}
|
||||
|
||||
// Set up parent IDs for both row and locale row
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Field } from 'payload/types';
|
||||
import { SQL } from 'drizzle-orm';
|
||||
import { GenericColumn, PostgresAdapter } from '../types';
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import type { GenericColumn, PostgresAdapter } from '../types'
|
||||
|
||||
type BaseArgs = {
|
||||
adapter: PostgresAdapter
|
||||
@@ -11,17 +12,17 @@ type BaseArgs = {
|
||||
}
|
||||
|
||||
type CreateArgs = BaseArgs & {
|
||||
upsertTarget?: never
|
||||
where?: never
|
||||
id?: never
|
||||
operation: 'create'
|
||||
upsertTarget?: never
|
||||
where?: never
|
||||
}
|
||||
|
||||
type UpdateArgs = BaseArgs & {
|
||||
upsertTarget?: GenericColumn
|
||||
id?: number | string
|
||||
operation: 'update'
|
||||
upsertTarget?: GenericColumn
|
||||
where?: SQL<unknown>
|
||||
id?: string | number
|
||||
}
|
||||
|
||||
export type Args = CreateArgs | UpdateArgs
|
||||
|
||||
@@ -4,38 +4,38 @@ export type BlocksMap = {
|
||||
}
|
||||
|
||||
export const createBlocksMap = (data: Record<string, unknown>): BlocksMap => {
|
||||
const blocksMap: BlocksMap = {};
|
||||
const blocksMap: BlocksMap = {}
|
||||
|
||||
Object.entries(data).forEach(([key, rows]) => {
|
||||
if (key.startsWith('_blocks_') && Array.isArray(rows)) {
|
||||
const blockType = key.replace('_blocks_', '');
|
||||
const blockType = key.replace('_blocks_', '')
|
||||
|
||||
rows.forEach((row) => {
|
||||
if ('_path' in row) {
|
||||
if (!(row._path in blocksMap)) blocksMap[row._path] = [];
|
||||
if (!(row._path in blocksMap)) blocksMap[row._path] = []
|
||||
|
||||
row.blockType = blockType;
|
||||
blocksMap[row._path].push(row);
|
||||
row.blockType = blockType
|
||||
blocksMap[row._path].push(row)
|
||||
|
||||
delete row._path;
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
delete data[key];
|
||||
delete data[key]
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
Object.entries(blocksMap).reduce((sortedBlocksMap, [path, blocks]) => {
|
||||
sortedBlocksMap[path] = blocks.sort((a, b) => {
|
||||
if (typeof a._order === 'number' && typeof b._order === 'number') {
|
||||
return a._order - b._order;
|
||||
return a._order - b._order
|
||||
}
|
||||
|
||||
return 0;
|
||||
});
|
||||
return 0
|
||||
})
|
||||
|
||||
return sortedBlocksMap;
|
||||
}, {});
|
||||
return sortedBlocksMap
|
||||
}, {})
|
||||
|
||||
return blocksMap;
|
||||
};
|
||||
return blocksMap
|
||||
}
|
||||
|
||||
@@ -1,22 +1,24 @@
|
||||
// Flatten relationships to object with path keys
|
||||
// for easier retrieval
|
||||
export const createRelationshipMap = (rawRelationships: unknown): Record<string, Record<string, unknown>[]> => {
|
||||
let relationships = {};
|
||||
export const createRelationshipMap = (
|
||||
rawRelationships: unknown,
|
||||
): Record<string, Record<string, unknown>[]> => {
|
||||
let relationships = {}
|
||||
|
||||
if (Array.isArray(rawRelationships)) {
|
||||
relationships = rawRelationships.reduce((res, relation) => {
|
||||
const formattedRelation = {
|
||||
...relation,
|
||||
};
|
||||
}
|
||||
|
||||
delete formattedRelation.path;
|
||||
delete formattedRelation.path
|
||||
|
||||
if (!res[relation.path]) res[relation.path] = [];
|
||||
res[relation.path].push(formattedRelation);
|
||||
if (!res[relation.path]) res[relation.path] = []
|
||||
res[relation.path].push(formattedRelation)
|
||||
|
||||
return res;
|
||||
}, {});
|
||||
return res
|
||||
}, {})
|
||||
}
|
||||
|
||||
return relationships;
|
||||
};
|
||||
return relationships
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { fieldAffectsData, fieldHasSubFields } from 'payload/dist/fields/config/types';
|
||||
import { Field } from 'payload/types';
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import { fieldAffectsData, fieldHasSubFields } from 'payload/types'
|
||||
|
||||
export const hasLocalesTable = (fields: Field[]): boolean => {
|
||||
return fields.some((field) => {
|
||||
if (fieldAffectsData(field) && field.localized) return true;
|
||||
if (fieldHasSubFields(field) && field.type !== 'array') return hasLocalesTable(field.fields);
|
||||
if (field.type === 'tabs') return field.tabs.some((tab) => hasLocalesTable(tab.fields));
|
||||
return false;
|
||||
});
|
||||
};
|
||||
if (fieldAffectsData(field) && field.localized) return true
|
||||
if (fieldHasSubFields(field) && field.type !== 'array') return hasLocalesTable(field.fields)
|
||||
if (field.type === 'tabs') return field.tabs.some((tab) => hasLocalesTable(tab.fields))
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
export function isArrayOfRows(data: unknown): data is Record<string, unknown>[] {
|
||||
return Array.isArray(data);
|
||||
return Array.isArray(data)
|
||||
}
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
import path from 'path';
|
||||
import type { Webpack } from 'payload/dist/database/types';
|
||||
import type { Webpack } from 'payload/database'
|
||||
|
||||
import path from 'path'
|
||||
|
||||
export const webpack: Webpack = (config) => {
|
||||
return {
|
||||
...config,
|
||||
resolve: {
|
||||
...config.resolve || {},
|
||||
...(config.resolve || {}),
|
||||
alias: {
|
||||
...config.resolve?.alias || {},
|
||||
...(config.resolve?.alias || {}),
|
||||
[path.resolve(__dirname, './index')]: path.resolve(__dirname, 'mock'),
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,24 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"declaration": true, /* Generates corresponding '.d.ts' file. */
|
||||
"module": "commonjs", /* Specify what module code is generated. */
|
||||
"rootDir": "./src", /* Specify the root folder within your source files. */
|
||||
"outDir": "./dist", /* Specify an output folder for all emitted files. */
|
||||
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
|
||||
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
|
||||
"skipLibCheck": true /* Skip type checking all .d.ts files. */
|
||||
}
|
||||
"composite": true, // Make sure typescript knows that this module depends on their references
|
||||
"noEmit": false /* Do not emit outputs. */,
|
||||
"emitDeclarationOnly": true,
|
||||
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
|
||||
"rootDir": "./src" /* Specify the root folder within your source files. */
|
||||
},
|
||||
"exclude": [
|
||||
"dist",
|
||||
"build",
|
||||
"tests",
|
||||
"test",
|
||||
"node_modules",
|
||||
".eslintrc.js",
|
||||
"src/**/*.spec.js",
|
||||
"src/**/*.spec.jsx",
|
||||
"src/**/*.spec.ts",
|
||||
"src/**/*.spec.tsx"
|
||||
],
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.d.ts", "src/**/*.json"],
|
||||
"references": [{ "path": "../payload" }] // db-postgres depends on payload
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,8 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
jest: true,
|
||||
},
|
||||
plugins: ['jest', 'jest-dom'],
|
||||
extends: ['./rules/jest.cjs', './rules/jest-dom.cjs'].map(require.resolve),
|
||||
rules: {},
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
module.exports = {
|
||||
rules: {
|
||||
'jest-dom/prefer-checked': 'error',
|
||||
'jest-dom/prefer-enabled-disabled': 'error',
|
||||
'jest-dom/prefer-focus': 'error',
|
||||
'jest-dom/prefer-required': 'error',
|
||||
'jest-dom/prefer-to-have-attribute': 'error',
|
||||
},
|
||||
}
|
||||
@@ -0,0 +1,37 @@
|
||||
module.exports = {
|
||||
rules: {
|
||||
'jest/consistent-test-it': ['error', { fn: 'it' }],
|
||||
'jest/expect-expect': 'error',
|
||||
'jest/prefer-lowercase-title': ['error', { ignore: ['describe'] }],
|
||||
'jest/no-alias-methods': 'error',
|
||||
'jest/no-commented-out-tests': 'off',
|
||||
'jest/no-disabled-tests': 'off',
|
||||
'jest/no-duplicate-hooks': 'error',
|
||||
'jest/no-export': 'error',
|
||||
'jest/no-focused-tests': 'error',
|
||||
'jest/no-hooks': 'off',
|
||||
'jest/no-identical-title': 'error',
|
||||
'jest/no-if': 'error',
|
||||
'jest/no-jasmine-globals': 'error',
|
||||
'jest/no-large-snapshots': 'error',
|
||||
'jest/no-mocks-import': 'error',
|
||||
'jest/no-standalone-expect': 'error',
|
||||
'jest/no-done-callback': 'error',
|
||||
'jest/no-test-prefixes': 'error',
|
||||
'jest/no-test-return-statement': 'error',
|
||||
'jest/prefer-called-with': 'error',
|
||||
'jest/prefer-expect-assertions': 'off',
|
||||
'jest/prefer-hooks-on-top': 'error',
|
||||
'jest/prefer-spy-on': 'error',
|
||||
'jest/prefer-strict-equal': 'error',
|
||||
'jest/prefer-to-contain': 'error',
|
||||
'jest/prefer-to-have-length': 'error',
|
||||
'jest/prefer-todo': 'error',
|
||||
'jest/require-top-level-describe': 'error',
|
||||
'jest/require-to-throw-message': 'error',
|
||||
'jest/valid-describe-callback': 'error',
|
||||
'jest/valid-expect-in-promise': 'error',
|
||||
'jest/valid-expect': 'error',
|
||||
'jest/valid-title': 'error',
|
||||
},
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
browser: true,
|
||||
},
|
||||
plugins: ['jsx-a11y', 'react-hooks', 'react'],
|
||||
settings: {
|
||||
react: {
|
||||
version: 'detect',
|
||||
},
|
||||
},
|
||||
parserOptions: {
|
||||
ecmaFeatures: {
|
||||
jsx: true,
|
||||
},
|
||||
},
|
||||
extends: ['./rules/react-a11y.cjs', './rules/react.cjs'].map(require.resolve),
|
||||
rules: {},
|
||||
}
|
||||
@@ -0,0 +1,245 @@
|
||||
// Sourced from https://github.com/airbnb/javascript/blob/master/packages/eslint-config-airbnb/rules/react-a11y.js
|
||||
|
||||
module.exports = {
|
||||
rules: {
|
||||
// Enforce that anchors have content
|
||||
// https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/anchor-has-content.md
|
||||
'jsx-a11y/anchor-has-content': ['error', { components: [] }],
|
||||
|
||||
// Require ARIA roles to be valid and non-abstract
|
||||
// https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/aria-role.md
|
||||
'jsx-a11y/aria-role': ['error', { ignoreNonDom: false }],
|
||||
|
||||
// Enforce all aria-* props are valid.
|
||||
// https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/aria-props.md
|
||||
'jsx-a11y/aria-props': 'error',
|
||||
|
||||
// Enforce ARIA state and property values are valid.
|
||||
// https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/aria-proptypes.md
|
||||
'jsx-a11y/aria-proptypes': 'error',
|
||||
|
||||
// Enforce that elements that do not support ARIA roles, states, and
|
||||
// properties do not have those attributes.
|
||||
// https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/aria-unsupported-elements.md
|
||||
'jsx-a11y/aria-unsupported-elements': 'error',
|
||||
|
||||
// Enforce that all elements that require alternative text have meaningful information
|
||||
// https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/alt-text.md
|
||||
    'jsx-a11y/alt-text': [
      'error',
      {
        elements: ['img', 'object', 'area', 'input[type="image"]'],
        img: [],
        object: [],
        area: [],
        'input[type="image"]': [],
      },
    ],

    // Prevent img alt text from containing redundant words like "image", "picture", or "photo"
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/img-redundant-alt.md
    'jsx-a11y/img-redundant-alt': 'error',

    // require that JSX labels use "htmlFor"
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/label-has-for.md
    // deprecated: replaced by `label-has-associated-control` rule
    'jsx-a11y/label-has-for': [
      'off',
      {
        components: [],
        required: {
          every: ['nesting', 'id'],
        },
        allowChildren: false,
      },
    ],

    // Enforce that a label tag has a text label and an associated control.
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/b800f40a2a69ad48015ae9226fbe879f946757ed/docs/rules/label-has-associated-control.md
    'jsx-a11y/label-has-associated-control': [
      'error',
      {
        labelComponents: [],
        labelAttributes: [],
        controlComponents: [],
        assert: 'both',
        depth: 25,
      },
    ],

    // Enforce that a control (an interactive element) has a text label.
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/control-has-associated-label.md
    'jsx-a11y/control-has-associated-label': [
      'error',
      {
        labelAttributes: ['label'],
        controlComponents: [],
        ignoreElements: ['audio', 'canvas', 'embed', 'input', 'textarea', 'tr', 'video'],
        ignoreRoles: [
          'grid',
          'listbox',
          'menu',
          'menubar',
          'radiogroup',
          'row',
          'tablist',
          'toolbar',
          'tree',
          'treegrid',
        ],
        depth: 5,
      },
    ],

    // require that mouseover/out come with focus/blur, for keyboard-only users
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/mouse-events-have-key-events.md
    'jsx-a11y/mouse-events-have-key-events': 'error',

    // Prevent use of `accessKey`
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-access-key.md
    'jsx-a11y/no-access-key': 'error',

    // require onBlur instead of onChange
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-onchange.md
    'jsx-a11y/no-onchange': 'off',

    // Elements with an interactive role and interaction handlers must be focusable
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/interactive-supports-focus.md
    'jsx-a11y/interactive-supports-focus': 'error',

    // Enforce that elements with ARIA roles must have all required attributes
    // for that role.
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/role-has-required-aria-props.md
    'jsx-a11y/role-has-required-aria-props': 'error',

    // Enforce that elements with explicit or implicit roles defined contain
    // only aria-* properties supported by that role.
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/role-supports-aria-props.md
    'jsx-a11y/role-supports-aria-props': 'error',

    // Enforce tabIndex value is not greater than zero.
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/tabindex-no-positive.md
    'jsx-a11y/tabindex-no-positive': 'error',

    // ensure <hX> tags have content and are not aria-hidden
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/heading-has-content.md
    'jsx-a11y/heading-has-content': ['error', { components: [''] }],

    // require HTML elements to have a "lang" prop
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/html-has-lang.md
    'jsx-a11y/html-has-lang': 'error',

    // require HTML element's lang prop to be valid
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/lang.md
    'jsx-a11y/lang': 'error',

    // prevent distracting elements, like <marquee> and <blink>
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-distracting-elements.md
    'jsx-a11y/no-distracting-elements': [
      'error',
      {
        elements: ['marquee', 'blink'],
      },
    ],

    // only allow <th> to have the "scope" attr
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/scope.md
    'jsx-a11y/scope': 'error',

    // require onClick be accompanied by onKeyUp/onKeyDown/onKeyPress
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/click-events-have-key-events.md
    'jsx-a11y/click-events-have-key-events': 'error',

    // Enforce that DOM elements without semantic behavior not have interaction handlers
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-static-element-interactions.md
    'jsx-a11y/no-static-element-interactions': [
      'error',
      {
        handlers: ['onClick', 'onMouseDown', 'onMouseUp', 'onKeyPress', 'onKeyDown', 'onKeyUp'],
      },
    ],

    // A non-interactive element does not support event handlers (mouse and key handlers)
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-noninteractive-element-interactions.md
    'jsx-a11y/no-noninteractive-element-interactions': [
      'error',
      {
        handlers: ['onClick', 'onMouseDown', 'onMouseUp', 'onKeyPress', 'onKeyDown', 'onKeyUp'],
      },
    ],

    // ensure emoji are accessible
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/accessible-emoji.md
    'jsx-a11y/accessible-emoji': 'error',

    // elements with aria-activedescendant must be tabbable
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/aria-activedescendant-has-tabindex.md
    'jsx-a11y/aria-activedescendant-has-tabindex': 'error',

    // ensure iframe elements have a unique title
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/iframe-has-title.md
    'jsx-a11y/iframe-has-title': 'error',

    // prohibit autoFocus prop
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-autofocus.md
    'jsx-a11y/no-autofocus': ['error', { ignoreNonDOM: true }],

    // ensure HTML elements do not specify redundant ARIA roles
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-redundant-roles.md
    'jsx-a11y/no-redundant-roles': 'error',

    // media elements must have captions
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/media-has-caption.md
    'jsx-a11y/media-has-caption': [
      'error',
      {
        audio: [],
        video: [],
        track: [],
      },
    ],

    // WAI-ARIA roles should not be used to convert an interactive element to non-interactive
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-interactive-element-to-noninteractive-role.md
    'jsx-a11y/no-interactive-element-to-noninteractive-role': [
      'error',
      {
        tr: ['none', 'presentation'],
      },
    ],

    // WAI-ARIA roles should not be used to convert a non-interactive element to interactive
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-noninteractive-element-to-interactive-role.md
    'jsx-a11y/no-noninteractive-element-to-interactive-role': [
      'error',
      {
        ul: ['listbox', 'menu', 'menubar', 'radiogroup', 'tablist', 'tree', 'treegrid'],
        ol: ['listbox', 'menu', 'menubar', 'radiogroup', 'tablist', 'tree', 'treegrid'],
        li: ['menuitem', 'option', 'row', 'tab', 'treeitem'],
        table: ['grid'],
        td: ['gridcell'],
      },
    ],

    // Tab key navigation should be limited to elements on the page that can be interacted with.
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-noninteractive-tabindex.md
    'jsx-a11y/no-noninteractive-tabindex': [
      'error',
      {
        tags: [],
        roles: ['tabpanel'],
      },
    ],

    // ensure <a> tags are valid
    // https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/0745af376cdc8686d85a361ce36952b1fb1ccf6e/docs/rules/anchor-is-valid.md
    'jsx-a11y/anchor-is-valid': [
      'error',
      {
        components: ['Link'],
        specialLink: ['to'],
        aspects: ['noHref', 'invalidHref', 'preferButton'],
      },
    ],
  },
}
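For context, a minimal sketch of markup that satisfies the anchor-is-valid options above (this snippet is illustrative and not part of the diff; the react-router-dom Link import and the onOpen prop are assumptions): with components: ['Link'] and specialLink: ['to'], a router link with a `to` prop passes, while a bare anchor used only as a click target is reported under `preferButton` and is better written as an explicitly typed button.

    // Illustrative only; assumes a router-style Link component matching components: ['Link'].
    import React from 'react'
    import { Link } from 'react-router-dom'

    type NavProps = { onOpen: () => void }

    export const Nav: React.FC<NavProps> = ({ onOpen }) => (
      <nav>
        {/* Passes anchor-is-valid: `to` counts as a valid href via specialLink: ['to'] */}
        <Link to="/docs">Docs</Link>
        {/* <a href="#" onClick={onOpen}> would be flagged (preferButton); use a real button */}
        <button type="button" onClick={onOpen}>
          Open menu
        </button>
      </nav>
    )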
@@ -0,0 +1,546 @@
module.exports = {
  rules: {
    // View link below for react rules documentation
    // https://github.com/yannickcr/eslint-plugin-react#list-of-supported-rules

    // Specify whether double or single quotes should be used in JSX attributes
    // https://eslint.org/docs/rules/jsx-quotes
    'jsx-quotes': ['error', 'prefer-double'],

    'class-methods-use-this': [
      'error',
      {
        exceptMethods: [
          'render',
          'getInitialState',
          'getDefaultProps',
          'getChildContext',
          'componentWillMount',
          'UNSAFE_componentWillMount',
          'componentDidMount',
          'componentWillReceiveProps',
          'UNSAFE_componentWillReceiveProps',
          'shouldComponentUpdate',
          'componentWillUpdate',
          'UNSAFE_componentWillUpdate',
          'componentDidUpdate',
          'componentWillUnmount',
          'componentDidCatch',
          'getSnapshotBeforeUpdate',
        ],
      },
    ],

    // Prevent missing displayName in a React component definition
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/display-name.md
    'react/display-name': ['off', { ignoreTranspilerName: false }],

    // Forbid certain propTypes (any, array, object)
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/forbid-prop-types.md
    'react/forbid-prop-types': [
      'error',
      {
        forbid: ['any', 'array', 'object'],
        checkContextTypes: true,
        checkChildContextTypes: true,
      },
    ],

    // Forbid certain props on DOM Nodes
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/forbid-dom-props.md
    'react/forbid-dom-props': ['off', { forbid: [] }],

    // Enforce boolean attributes notation in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-boolean-value.md
    'react/jsx-boolean-value': ['error', 'never', { always: [] }],

    // Validate closing bracket location in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-closing-bracket-location.md
    'react/jsx-closing-bracket-location': ['error', 'line-aligned'],

    // Validate closing tag location in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-closing-tag-location.md
    'react/jsx-closing-tag-location': 'error',

    // Enforce or disallow spaces inside of curly braces in JSX attributes
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-curly-spacing.md
    'react/jsx-curly-spacing': ['error', 'never', { allowMultiline: true }],

    // Enforce event handler naming conventions in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-handler-names.md
    'react/jsx-handler-names': [
      'off',
      {
        eventHandlerPrefix: 'handle',
        eventHandlerPropPrefix: 'on',
      },
    ],

    // Validate props indentation in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-indent-props.md
    'react/jsx-indent-props': ['error', 2],

    // Validate JSX has key prop when in array or iterator
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-key.md
    'react/jsx-key': 'off',

    // Limit maximum of props on a single line in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-max-props-per-line.md
    'react/jsx-max-props-per-line': ['error', { maximum: 1 }],

    // Prevent usage of .bind() in JSX props
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-bind.md
    'react/jsx-no-bind': [
      'error',
      {
        ignoreRefs: true,
        allowArrowFunctions: true,
        allowFunctions: false,
        allowBind: false,
        ignoreDOMComponents: true,
      },
    ],

    // Prevent duplicate props in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-duplicate-props.md
    'react/jsx-no-duplicate-props': ['error', { ignoreCase: true }],

    // Prevent usage of unwrapped JSX strings
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-literals.md
    'react/jsx-no-literals': ['off', { noStrings: true }],

    // Disallow undeclared variables in JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-undef.md
    'react/jsx-no-undef': 'error',

    // Enforce PascalCase for user-defined JSX components
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-pascal-case.md
    'react/jsx-pascal-case': [
      'error',
      {
        allowAllCaps: true,
        ignore: [],
      },
    ],

    // Enforce propTypes declarations alphabetical sorting
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-prop-types.md
    'react/sort-prop-types': [
      'off',
      {
        ignoreCase: true,
        callbacksLast: false,
        requiredFirst: false,
        sortShapeProp: true,
      },
    ],

    // Deprecated in favor of react/jsx-sort-props
    'react/jsx-sort-prop-types': 'off',

    // Enforce props alphabetical sorting
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-sort-props.md
    'react/jsx-sort-props': [
      'off',
      {
        ignoreCase: true,
        callbacksLast: false,
        shorthandFirst: false,
        shorthandLast: false,
        noSortAlphabetically: false,
        reservedFirst: true,
      },
    ],

    // Enforce defaultProps declarations alphabetical sorting
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/jsx-sort-default-props.md
    'react/jsx-sort-default-props': [
      'off',
      {
        ignoreCase: true,
      },
    ],

    // Prevent React to be incorrectly marked as unused
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-react.md
    'react/jsx-uses-react': ['error'],

    // Prevent variables used in JSX to be incorrectly marked as unused
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-vars.md
    'react/jsx-uses-vars': 'error',

    // Prevent usage of dangerous JSX properties
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger.md
    'react/no-danger': 'off',

    // Prevent usage of deprecated methods
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-deprecated.md
    'react/no-deprecated': ['error'],

    // Prevent usage of setState in componentDidMount
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-mount-set-state.md
    // this is necessary for server-rendering
    'react/no-did-mount-set-state': 'off',

    // Prevent usage of setState in componentDidUpdate
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-update-set-state.md
    'react/no-did-update-set-state': 'off',

    // Prevent usage of setState in componentWillUpdate
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-will-update-set-state.md
    'react/no-will-update-set-state': 'error',

    // Prevent direct mutation of this.state
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-direct-mutation-state.md
    'react/no-direct-mutation-state': 'off',

    // Prevent usage of isMounted
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-is-mounted.md
    'react/no-is-mounted': 'error',

    // Prevent multiple component definition per file
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-multi-comp.md
    'react/no-multi-comp': 'off',

    // Prevent usage of setState
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-set-state.md
    'react/no-set-state': 'off',

    // Prevent using string references
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-string-refs.md
    'react/no-string-refs': 'error',

    // Prevent usage of unknown DOM property
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unknown-property.md
    'react/no-unknown-property': 'error',

    // Require ES6 class declarations over React.createClass
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prefer-es6-class.md
    'react/prefer-es6-class': ['error', 'always'],

    // Require stateless functions when not using lifecycle methods, setState or ref
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prefer-stateless-function.md
    'react/prefer-stateless-function': ['error', { ignorePureComponents: true }],

    // Prevent missing props validation in a React component definition
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prop-types.md
    'react/prop-types': [
      'error',
      {
        ignore: [],
        customValidators: [],
        skipUndeclared: false,
      },
    ],

    // Prevent missing React when using JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/react-in-jsx-scope.md
    'react/react-in-jsx-scope': 'error',

    // Require render() methods to return something
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-render-return.md
    'react/require-render-return': 'error',

    // Prevent extra closing tags for components without children
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/self-closing-comp.md
    'react/self-closing-comp': 'error',

    // Enforce component methods order
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/sort-comp.md
    'react/sort-comp': [
      'error',
      {
        order: [
          'static-variables',
          'static-methods',
          'instance-variables',
          'lifecycle',
          '/^on.+$/',
          'getters',
          'setters',
          '/^(get|set)(?!(InitialState$|DefaultProps$|ChildContext$)).+$/',
          'instance-methods',
          'everything-else',
          'rendering',
        ],
        groups: {
          lifecycle: [
            'displayName',
            'propTypes',
            'contextTypes',
            'childContextTypes',
            'mixins',
            'statics',
            'defaultProps',
            'constructor',
            'getDefaultProps',
            'getInitialState',
            'state',
            'getChildContext',
            'getDerivedStateFromProps',
            'componentWillMount',
            'UNSAFE_componentWillMount',
            'componentDidMount',
            'componentWillReceiveProps',
            'UNSAFE_componentWillReceiveProps',
            'shouldComponentUpdate',
            'componentWillUpdate',
            'UNSAFE_componentWillUpdate',
            'getSnapshotBeforeUpdate',
            'componentDidUpdate',
            'componentDidCatch',
            'componentWillUnmount',
          ],
          rendering: ['/^render.+$/', 'render'],
        },
      },
    ],

    // Prevent missing parentheses around multilines JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/jsx-wrap-multilines.md
    'react/jsx-wrap-multilines': [
      'error',
      {
        declaration: 'parens-new-line',
        assignment: 'parens-new-line',
        return: 'parens-new-line',
        arrow: 'parens-new-line',
        condition: 'parens-new-line',
        logical: 'parens-new-line',
        prop: 'parens-new-line',
      },
    ],

    // Require that the first prop in a JSX element be on a new line when the element is multiline
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-first-prop-new-line.md
    'react/jsx-first-prop-new-line': ['error', 'multiline-multiprop'],

    // Enforce spacing around jsx equals signs
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-equals-spacing.md
    'react/jsx-equals-spacing': ['error', 'never'],

    // Enforce JSX indentation
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-indent.md
    'react/jsx-indent': ['error', 2],

    // Disallow target="_blank" on links
    // https://github.com/yannickcr/eslint-plugin-react/blob/ac102885765be5ff37847a871f239c6703e1c7cc/docs/rules/jsx-no-target-blank.md
    'react/jsx-no-target-blank': ['error', { enforceDynamicLinks: 'always' }],

    // only .jsx files may have JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-filename-extension.md
    'react/jsx-filename-extension': ['error', { extensions: ['.js', '.jsx', '.ts', '.tsx'] }],

    // prevent accidental JS comments from being injected into JSX as text
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-comment-textnodes.md
    'react/jsx-no-comment-textnodes': 'error',

    // disallow using React.render/ReactDOM.render's return value
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-render-return-value.md
    'react/no-render-return-value': 'error',

    // require a shouldComponentUpdate method, or PureRenderMixin
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-optimization.md
    'react/require-optimization': ['off', { allowDecorators: [] }],

    // warn against using findDOMNode()
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-find-dom-node.md
    'react/no-find-dom-node': 'error',

    // Forbid certain props on Components
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-component-props.md
    'react/forbid-component-props': ['off', { forbid: [] }],

    // Forbid certain elements
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-elements.md
    'react/forbid-elements': ['off', { forbid: [] }],

    // Prevent problem with children and props.dangerouslySetInnerHTML
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger-with-children.md
    'react/no-danger-with-children': 'error',

    // Prevent unused propType definitions
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unused-prop-types.md
    'react/no-unused-prop-types': [
      'error',
      {
        customValidators: [],
        skipShapeProps: true,
      },
    ],

    // Require style prop value be an object or var
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/style-prop-object.md
    'react/style-prop-object': 'error',

    // Prevent invalid characters from appearing in markup
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unescaped-entities.md
    'react/no-unescaped-entities': 'error',

    // Prevent passing of children as props
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-children-prop.md
    'react/no-children-prop': 'error',

    // Validate whitespace in and around the JSX opening and closing brackets
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/jsx-tag-spacing.md
    'react/jsx-tag-spacing': [
      'error',
      {
        closingSlash: 'never',
        beforeSelfClosing: 'always',
        afterOpening: 'never',
        beforeClosing: 'never',
      },
    ],

    // Enforce spaces before the closing bracket of self-closing JSX elements
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-space-before-closing.md
    // Deprecated in favor of jsx-tag-spacing
    'react/jsx-space-before-closing': ['off', 'always'],

    // Prevent usage of Array index in keys
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-array-index-key.md
    'react/no-array-index-key': 'off',

    // Enforce a defaultProps definition for every prop that is not a required prop
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/require-default-props.md
    'react/require-default-props': [
      'error',
      {
        forbidDefaultForRequired: true,
      },
    ],

    // Forbids using non-exported propTypes
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-foreign-prop-types.md
    // this is intentionally set to "warn". it would be "error",
    // but it's only critical if you're stripping propTypes in production.
    'react/forbid-foreign-prop-types': ['warn', { allowInPropTypes: true }],

    // Prevent void DOM elements from receiving children
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/void-dom-elements-no-children.md
    'react/void-dom-elements-no-children': 'error',

    // Enforce all defaultProps have a corresponding non-required PropType
    // https://github.com/yannickcr/eslint-plugin-react/blob/9e13ae2c51e44872b45cc15bf1ac3a72105bdd0e/docs/rules/default-props-match-prop-types.md
    'react/default-props-match-prop-types': ['error', { allowRequiredDefaults: false }],

    // Prevent usage of shouldComponentUpdate when extending React.PureComponent
    // https://github.com/yannickcr/eslint-plugin-react/blob/9e13ae2c51e44872b45cc15bf1ac3a72105bdd0e/docs/rules/no-redundant-should-component-update.md
    'react/no-redundant-should-component-update': 'error',

    // Prevent unused state values
    // https://github.com/yannickcr/eslint-plugin-react/pull/1103/
    'react/no-unused-state': 'error',

    // Enforces consistent naming for boolean props
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/boolean-prop-naming.md
    'react/boolean-prop-naming': [
      'off',
      {
        propTypeNames: ['bool', 'mutuallyExclusiveTrueProps'],
        rule: '^(is|has)[A-Z]([A-Za-z0-9]?)+',
        message: '',
      },
    ],

    // Prevents common casing typos
    // https://github.com/yannickcr/eslint-plugin-react/blob/73abadb697034b5ccb514d79fb4689836fe61f91/docs/rules/no-typos.md
    'react/no-typos': 'error',

    // Enforce curly braces or disallow unnecessary curly braces in JSX props and/or children
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-curly-brace-presence.md
    'react/jsx-curly-brace-presence': ['error', { props: 'never', children: 'never' }],

    // One JSX Element Per Line
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/jsx-one-expression-per-line.md
    'react/jsx-one-expression-per-line': ['error', { allow: 'single-child' }],

    // Enforce consistent usage of destructuring assignment of props, state, and context
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/destructuring-assignment.md
    'react/destructuring-assignment': ['error', 'always'],

    // Prevent using this.state within a this.setState
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/no-access-state-in-setstate.md
    'react/no-access-state-in-setstate': 'error',

    // Prevent usage of button elements without an explicit type attribute
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/button-has-type.md
    'react/button-has-type': [
      'error',
      {
        button: true,
        submit: true,
        reset: false,
      },
    ],

    // Ensures inline tags are not rendered without spaces between them
    'react/jsx-child-element-spacing': 'off',

    // Prevent this from being used in stateless functional components
    // https://github.com/yannickcr/eslint-plugin-react/blob/843d71a432baf0f01f598d7cf1eea75ad6896e4b/docs/rules/no-this-in-sfc.md
    'react/no-this-in-sfc': 'error',

    // Validate JSX maximum depth
    // https://github.com/yannickcr/eslint-plugin-react/blob/abe8381c0d6748047224c430ce47f02e40160ed0/docs/rules/jsx-max-depth.md
    'react/jsx-max-depth': 'off',

    // Disallow multiple spaces between inline JSX props
    // https://github.com/yannickcr/eslint-plugin-react/blob/ac102885765be5ff37847a871f239c6703e1c7cc/docs/rules/jsx-props-no-multi-spaces.md
    'react/jsx-props-no-multi-spaces': 'error',

    // Prevent usage of UNSAFE_ methods
    // https://github.com/yannickcr/eslint-plugin-react/blob/157cc932be2cfaa56b3f5b45df6f6d4322a2f660/docs/rules/no-unsafe.md
    'react/no-unsafe': 'off',

    // Enforce shorthand or standard form for React fragments
    // https://github.com/yannickcr/eslint-plugin-react/blob/bc976b837abeab1dffd90ac6168b746a83fc83cc/docs/rules/jsx-fragments.md
    'react/jsx-fragments': ['error', 'element'],

    // Enforce linebreaks in curly braces in JSX attributes and expressions.
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-curly-newline.md
    'react/jsx-curly-newline': [
      'error',
      {
        multiline: 'consistent',
        singleline: 'consistent',
      },
    ],

    // Enforce state initialization style
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/state-in-constructor.md
    // TODO: set to "never" once babel-preset-airbnb supports public class fields
    'react/state-in-constructor': ['error', 'always'],

    // Enforces where React component static properties should be positioned
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/static-property-placement.md
    // TODO: set to "static public field" once babel-preset-airbnb supports public class fields
    'react/static-property-placement': ['error', 'property assignment'],

    // Disallow JSX props spreading
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-props-no-spreading.md
    'react/jsx-props-no-spreading': 'off',

    // Enforce that props are read-only
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prefer-read-only-props.md
    'react/prefer-read-only-props': 'off',
  },

  settings: {
    'import/resolver': {
      node: {
        extensions: ['.js', '.jsx', '.json'],
      },
    },
    react: {
      pragma: 'React',
      version: 'detect',
    },
    propWrapperFunctions: [
      'forbidExtraProps', // https://www.npmjs.com/package/airbnb-prop-types
      'exact', // https://www.npmjs.com/package/prop-types-exact
      'Object.freeze', // https://tc39.github.io/ecma262/#sec-object.freeze
    ],
  },
}
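As a quick illustration of a few of the stylistic rules above (a sketch, not code from this commit; the component and prop names are made up), a function component that keeps destructuring-assignment: 'always', self-closing-comp, and the 'element' form required by jsx-fragments happy looks like this:

    import React from 'react'

    type GreetingProps = { name: string }

    // Props are destructured (react/destructuring-assignment: 'always'),
    // the childless <hr> is self-closed (react/self-closing-comp),
    // and the fragment uses the element form (react/jsx-fragments: ['error', 'element']).
    export const Greeting: React.FC<GreetingProps> = ({ name }) => (
      <React.Fragment>
        <hr />
        <p>{`Hello, ${name}`}</p>
      </React.Fragment>
    )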
62
packages/eslint-config-payload/eslint-config/index.cjs
Normal file
@@ -0,0 +1,62 @@
module.exports = {
  env: {
    es6: true,
    browser: true,
    node: true,
  },
  extends: [
    'eslint:recommended',
    'plugin:perfectionist/recommended-natural',
    'plugin:@typescript-eslint/recommended-type-checked',
    'plugin:regexp/recommended',
    'plugin:react/recommended',
    'plugin:react-hooks/recommended',
    './configs/jest/index.cjs',
    './configs/react/index.cjs',
    'prettier',
  ],
  parser: '@typescript-eslint/parser',
  parserOptions: {
    ecmaFeatures: {
      jsx: true,
    },
    ecmaVersion: 'latest',
    sourceType: 'module',
  },
  plugins: ['@typescript-eslint'],
  overrides: [
    {
      files: ['*.js', '*.cjs'],
      extends: ['plugin:@typescript-eslint/disable-type-checked'],
    },
  ],
  rules: {
    '@typescript-eslint/ban-ts-comment': 'off',
    '@typescript-eslint/consistent-type-imports': 'warn',
    '@typescript-eslint/no-explicit-any': 'warn',
    // Type-aware any rules:
    '@typescript-eslint/no-unsafe-assignment': 'off',
    '@typescript-eslint/no-unsafe-member-access': 'off',
    '@typescript-eslint/no-unsafe-call': 'off',
    '@typescript-eslint/no-unsafe-argument': 'off',
    '@typescript-eslint/no-unsafe-return': 'off',
    // This rule doesn't work well in .tsx files
    '@typescript-eslint/no-misused-promises': 'off',
    // Type-aware any rules end
    '@typescript-eslint/no-use-before-define': 'off',
    'arrow-body-style': 0,
    'import/prefer-default-export': 'off',
    'no-console': 'warn',
    'no-sparse-arrays': 'off',
    'no-underscore-dangle': 'off',
    'no-use-before-define': 'off',
    'react/no-unused-prop-types': 'off',
    'react/prop-types': 'off',
    'react/require-default-props': 'off',
  },
  settings: {
    'import/parsers': {
      '@typescript-eslint/parser': ['.ts', '.tsx'],
    },
  },
}
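Among the rule tweaks in this shared config, '@typescript-eslint/consistent-type-imports': 'warn' is the one most likely to touch day-to-day code: it prefers type-only imports when an import is used purely as a type. A minimal sketch (illustrative, not taken from this diff):

    // Warned: `import { ReactNode } from 'react'` when ReactNode is only ever used as a type.
    // Preferred form, which satisfies consistent-type-imports:
    import type { ReactNode } from 'react'

    export const hasChildren = (children: ReactNode): boolean =>
      children !== null && children !== undefined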
4
packages/eslint-config-payload/index.js
Normal file
@@ -0,0 +1,4 @@
module.exports = {
  root: true,
  extends: ['./eslint-config/index.cjs'],
}
30
packages/eslint-config-payload/package.json
Normal file
@@ -0,0 +1,30 @@
{
  "name": "@payloadcms/eslint-config",
  "version": "0.0.1",
  "private": true,
  "description": "Payload styles for ESLint and Prettier",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "dependencies": {
    "@types/eslint": "8.44.2",
    "@typescript-eslint/eslint-plugin": "6.6.0",
    "@typescript-eslint/parser": "6.6.0",
    "eslint": "8.48.0",
    "eslint-config-prettier": "9.0.0",
    "eslint-plugin-import": "2.28.1",
    "eslint-plugin-jest": "27.2.3",
    "eslint-plugin-jest-dom": "5.1.0",
    "eslint-plugin-jsx-a11y": "6.7.1",
    "eslint-plugin-node": "11.1.0",
    "eslint-plugin-perfectionist": "2.0.0",
    "eslint-plugin-playwright": "0.16.0",
    "eslint-plugin-react": "7.33.2",
    "eslint-plugin-react-hooks": "4.6.0",
    "eslint-plugin-regexp": "1.15.0"
  },
  "keywords": [],
  "author": "",
  "license": "MIT"
}
10
packages/payload/.eslintignore
Normal file
@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp
36
packages/payload/.eslintrc.cjs
Normal file
@@ -0,0 +1,36 @@
module.exports = {
  extends: ['@payloadcms'],
  ignorePatterns: ['**/payload-types.ts'],
  overrides: [
    {
      extends: ['plugin:@typescript-eslint/disable-type-checked'],
      files: [
        '*.js',
        '*.cjs',
        'playwright.config.ts',
        'playwright.bail.config.ts',
        'bin-cks.cjs',
        'bin-esm.mjs',
        'esm-loader.mjs',
        'esm-loader-playwright.mjs',
        '*.json',
        '*.md',
        '*.yml',
        '*.yaml',
      ],
    },
    {
      files: ['*.e2e.ts'],
      rules: {
        '@typescript-eslint/no-unsafe-assignment': 'off',
        '@typescript-eslint/no-use-before-define': 'off',
        'jest/expect-expect': 'off',
      },
    },
  ],
  parserOptions: {
    project: ['./tsconfig.json'],
    tsconfigRootDir: __dirname,
  },
  root: true,
}
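The *.e2e.ts override above relaxes rules that tend to false-positive in Playwright suites; jest/expect-expect, for instance, cannot see assertions made inside shared helpers. A hedged sketch (the file name and helper are illustrative, not from this commit):

    // login.e2e.ts (illustrative)
    import { expect, test } from '@playwright/test'
    import type { Page } from '@playwright/test'

    const expectLoggedIn = async (page: Page): Promise<void> => {
      await expect(page.locator('text=Dashboard')).toBeVisible()
    }

    test('logs into the admin panel', async ({ page }) => {
      await page.goto('http://localhost:3000/admin')
      // The assertion lives in the helper, which jest/expect-expect cannot detect.
      await expectLoggedIn(page)
    })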
10
packages/payload/.prettierignore
Normal file
@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp
15
packages/payload/.swcrc
Normal file
@@ -0,0 +1,15 @@
{
  "$schema": "https://json.schemastore.org/swcrc",
  "sourceMaps": "inline",
  "jsc": {
    "target": "esnext",
    "parser": {
      "syntax": "typescript",
      "tsx": true,
      "dts": true
    }
  },
  "module": {
    "type": "commonjs"
  }
}
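For reference, the same options this .swcrc sets can be exercised programmatically through @swc/core's transform API. This is a hedged sketch under that assumption, with an invented sample source string; the point is simply TypeScript in, CommonJS with an inline source map out.

    import { transform } from '@swc/core'

    const source = 'export const add = (a: number, b: number): number => a + b'

    const main = async (): Promise<void> => {
      const { code } = await transform(source, {
        jsc: { parser: { syntax: 'typescript', tsx: true }, target: 'esnext' },
        module: { type: 'commonjs' },
        sourceMaps: 'inline',
      })
      // `code` is CommonJS (roughly `exports.add = ...`) followed by an inline source map comment.
      process.stdout.write(code)
    }

    void main()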
Some files were not shown because too many files have changed in this diff