Compare commits
70 Commits
payload/2. ... plugin-clo

| Author | SHA1 | Date |
|---|---|---|
|  | 0a56d50334 |  |
|  | 02999a5659 |  |
|  | 365127bee4 |  |
|  | b67e97aa7f |  |
|  | 61e8ce1743 |  |
|  | 034aa68cd4 |  |
|  | 268e6c485e |  |
|  | 4c1a5dca44 |  |
|  | a12d1f4755 |  |
|  | d55be73992 |  |
|  | b9f236ae50 |  |
|  | 1d38e6d5d5 |  |
|  | 2f3c994cea |  |
|  | 0586f236bb |  |
|  | d582619ead |  |
|  | 17fc2d13d0 |  |
|  | 800ffd2611 |  |
|  | 661ca74364 |  |
|  | ec73b461a8 |  |
|  | 94885f3c65 |  |
|  | 31d0b309fe |  |
|  | c86526b5c8 |  |
|  | 28a065072f |  |
|  | efc0bc9ec9 |  |
|  | ade1d27c95 |  |
|  | 1040731e32 |  |
|  | 30f28898b6 |  |
|  | 6cb0470906 |  |
|  | 170ea5badc |  |
|  | cfb56589eb |  |
|  | f312bac065 |  |
|  | 3dd3f5b135 |  |
|  | 59f4d125ab |  |
|  | b2b2ee3338 |  |
|  | 7308abaabd |  |
|  | 9b1d0b2d0f |  |
|  | 9014f1fa63 |  |
|  | ba75d876e3 |  |
|  | f2b2e5cda9 |  |
|  | f751f69239 |  |
|  | f7ac9ff52a |  |
|  | ba7a043a99 |  |
|  | b149180db4 |  |
|  | 4efb9dd867 |  |
|  | 7002ca78b9 |  |
|  | 44ca3a4073 |  |
|  | dc7c952ace |  |
|  | c8a659cd39 |  |
|  | 6ba293c0f8 |  |
|  | 96a624ad5c |  |
|  | 545949dafc |  |
|  | d9f61bbdc8 |  |
|  | be06579b3e |  |
|  | 25e9bc62db |  |
|  | aca567634b |  |
|  | 1f0934877c |  |
|  | 61da010991 |  |
|  | ab9074220a |  |
|  | afa90a4362 |  |
|  | bc0516da90 |  |
|  | 46daf473c8 |  |
|  | 337b8ccbf3 |  |
|  | ba2e4c278f |  |
|  | 3196036ae9 |  |
|  | 9bc3ad5159 |  |
|  | 94d18e8d74 |  |
|  | c624eea0d8 |  |
|  | f97627092c |  |
|  | f00183029e |  |
|  | b6c5aaa966 |  |
31 .github/ISSUE_TEMPLATE/1.bug_report_v3.yml (vendored)

@@ -9,43 +9,38 @@ body:
      description: Want us to look into your issue faster? Follow the [reproduction-guide](https://github.com/payloadcms/payload/blob/main/.github/reproduction-guide.md) for more information.
    validations:
      required: false
  - type: input
    id: version

  - type: textarea
    attributes:
      label: Payload Version
      description: What version of Payload are you running?
    validations:
      required: true
  - type: input
    id: node-version
    attributes:
      label: Node Version
      description: What version of Node are you running?
    validations:
      required: true
  - type: input
    id: nextjs-version
    attributes:
      label: Next.js Version
      description: What version of Next.js are you running?
      label: Environment Info
      description: Paste output from `pnpm payload info` (>= beta.92) _or_ Payload, Node.js, and Next.js versions.
      render: text
      placeholder: |
        Payload:
        Node.js:
        Next.js:
    validations:
      required: true

  - type: textarea
    attributes:
      label: Describe the Bug
    validations:
      required: true

  - type: textarea
    attributes:
      label: Reproduction Steps
      description: Steps to reproduce the behavior, please provide a clear description of how to reproduce the issue, based on the linked minimal reproduction. Screenshots can be provided in the issue body below. If using code blocks, make sure that [syntax highlighting is correct](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/creating-and-highlighting-code-blocks#syntax-highlighting) and double check that the rendered preview is not broken.
    validations:
      required: true

  - type: input
    id: adapters-plugins
    attributes:
      label: Adapters and Plugins
      description: What adapters and plugins are you using if relevant? ie. db-mongodb, db-postgres, storage-vercel-blob, etc.

  - type: markdown
    attributes:
      value: Before submitting the issue, go through the steps you've written down to make sure the steps provided are detailed and clear.
25 .github/PULL_REQUEST_TEMPLATE.md (vendored)

@@ -1,23 +1,10 @@
## Description
<!--

<!-- Please include a summary of the pull request and any related issues it fixes. Please also include relevant motivation and context. -->
For external contributors, please include:

- [ ] I have read and understand the [CONTRIBUTING.md](https://github.com/payloadcms/payload/blob/main/CONTRIBUTING.md) document in this repository.
- A summary of the pull request and any related issues it fixes.
- Reasoning for the changes made or any additional context that may be useful.

## Type of change
Ensure you have read and understand the [CONTRIBUTING.md](https://github.com/payloadcms/payload/blob/main/CONTRIBUTING.md) document in this repository.

<!-- Please delete options that are not relevant. -->

- [ ] Chore (non-breaking change which does not add functionality)
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Change to the [templates](https://github.com/payloadcms/payload/tree/main/templates) directory (does not affect core functionality)
- [ ] Change to the [examples](https://github.com/payloadcms/payload/tree/main/examples) directory (does not affect core functionality)
- [ ] This change requires a documentation update

## Checklist:

- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] Existing test suite passes locally with my changes
- [ ] I have made corresponding changes to the documentation
-->
13 .github/actions/release-commenter/.eslintrc.js (vendored, Normal file)

@@ -0,0 +1,13 @@
module.exports = {
  env: {
    es6: true,
    node: true,
  },
  extends: ['eslint:recommended', 'plugin:@typescript-eslint/eslint-recommended'],
  parser: '@typescript-eslint/parser',
  parserOptions: {
    ecmaVersion: 2018,
    sourceType: 'module',
  },
  plugins: ['@typescript-eslint'],
}
8 .github/actions/release-commenter/.prettierrc.js (vendored, Normal file)

@@ -0,0 +1,8 @@
module.exports = {
  printWidth: 100,
  parser: 'typescript',
  semi: false,
  singleQuote: true,
  trailingComma: 'all',
  arrowParens: 'avoid',
}
74 .github/actions/release-commenter/README.md (vendored, Normal file)

@@ -0,0 +1,74 @@
# Release Commenter

This GitHub Action automatically comments on and/or labels Issues and PRs when a fix is released for them.

> [!IMPORTANT]
> 🔧 Heavily modified version of https://github.com/apexskier/github-release-commenter

## Fork Modifications

- Filters to closed PRs only
- Adds tag filter to support non-linear releases
- Better logging
- Moved to pnpm
- Uses @vercel/ncc for packaging
- Comments on locked issues by unlocking then re-locking

## How it works

Use this action in a workflow [triggered by a release](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows#release). It will scan commits between that and the prior release, find associated Issues and PRs, and comment on them to let people know a release has been made. Associated Issues and PRs can be directly [linked](https://docs.github.com/en/free-pro-team@latest/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue) to the commit or manually linked from a PR associated with the commit.

## Inputs

**GITHUB_TOKEN**

A GitHub personal access token with repo scope, such as [`secrets.GITHUB_TOKEN`](https://docs.github.com/en/free-pro-team@latest/actions/reference/authentication-in-a-workflow#about-the-github_token-secret).

**comment-template** (optional)

Override the comment posted on Issues and PRs. Set to the empty string to disable commenting. Several variable strings will be automatically replaced:

- `{release_link}` - a markdown link to the release
- `{release_name}` - the release's name
- `{release_tag}` - the release's tag

**label-template** (optional)

Add the given label. Multiple labels can be separated by commas. Several variable strings will be automatically replaced:

- `{release_name}` - the release's name
- `{release_tag}` - the release's tag

**skip-label** (optional)

Skip processing if any of the given labels are present. Same processing rules as **label-template**. Default is "dependencies".

## Example

```yml
on:
  release:
    types: [published]

jobs:
  release:
    steps:
      - uses: apexskier/github-release-commenter@v1
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          comment-template: |
            Release {release_link} addresses this.
```

## Known limitations

These are some known limitations of this action. I'd like to try to address them in the future.

- Non-linear releases aren't supported. For example, releasing a patch to a prior major release after a new major release has been bumped.
- Non-sequential releases aren't supported. For example, if you release multiple prereleases between two official releases, this will only create a comment for the first prerelease in which a fix is released, not the final release.
- The first release for a project will be ignored. This is intentional, as the use case is unlikely. Most projects will either have several alphas that don't need release comments, or won't use issues/PRs for the first commit.
- If a large number of things are commented on, you may see the error `Error: You have triggered an abuse detection mechanism. Please wait a few minutes before you try again.`. Consider using the `skip-label` input to reduce your load on the GitHub API.

## Versions

Workflows will automatically update the tags `v1` and `latest`, allowing you to reference one of those instead of locking to a specific release.
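The `{release_link}`, `{release_name}`, and `{release_tag}` placeholders above are filled in with plain string replacement. A minimal sketch of that substitution, mirroring the split/join approach in `src/index.ts` further down in this compare (the release values below are illustrative):

```ts
// Minimal sketch of the comment-template substitution
// (mirrors the split/join replacement used in src/index.ts).
const releaseLinkTemplateRegex = /{release_link}/g
const releaseNameTemplateRegex = /{release_name}/g
const releaseTagTemplateRegex = /{release_tag}/g

function renderCommentTemplate(
  template: string,
  release: { name: string | null; tag_name: string; html_url: string },
): string {
  // Fall back to the tag name when the release has no name.
  const releaseLabel = release.name || release.tag_name
  return template
    .trim()
    .split(releaseLinkTemplateRegex)
    .join(`[${releaseLabel}](${release.html_url})`)
    .split(releaseNameTemplateRegex)
    .join(releaseLabel)
    .split(releaseTagTemplateRegex)
    .join(release.tag_name)
}

// Prints: Included in release [v2.30.1](https://github.com/payloadcms/payload/releases/tag/v2.30.1)
console.log(
  renderCommentTemplate('Included in release {release_link}', {
    name: null,
    tag_name: 'v2.30.1',
    html_url: 'https://github.com/payloadcms/payload/releases/tag/v2.30.1',
  }),
)
```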
32 .github/actions/release-commenter/action.yml (vendored, Normal file)

@@ -0,0 +1,32 @@
name: Release Commenter
description: Comment on PRs and Issues when a fix is released
branding:
  icon: message-square
  color: blue
inputs:
  GITHUB_TOKEN:
    description: |
      A GitHub personal access token with repo scope, such as
      secrets.GITHUB_TOKEN.
    required: true
  comment-template:
    description: |
      Text template for the comment string.
    required: false
    default: |
      Included in release {release_link}
  label-template:
    description: Add the given label. Multiple labels can be separated by commas.
    required: false
  skip-label:
    description: Skip commenting if any of the given labels are present. Multiple labels can be separated by commas.
    required: false
    default: "dependencies"
  tag-filter:
    description: |
      Filter tags by a regular expression. Must be escaped. e.g. 'v\\d' to isolate tags between major versions.
    required: false
    default: null
runs:
  using: node20
  main: dist/index.js
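The `tag-filter` input exists so that a patch released on an older line (for example a 2.x release cut after 3.x has shipped) is compared against the previous tag on the same line rather than the newest tag overall. A minimal sketch of that filtering, following the release-filtering logic in `src/index.ts` below; the tag names are illustrative:

```ts
// Sketch of the tag-filter behavior: keep only releases whose tag matches the
// same filtered prefix as the current release, then compare the two newest.
function filterReleasesByTag<T extends { tag_name: string }>(
  releases: T[],
  currentTag: string,
  tagFilter?: string,
): T[] {
  if (!tagFilter) return releases

  // e.g. 'v2.0.2'.match('v\\d') -> 'v2'
  const prefix = currentTag.match(tagFilter)?.[0]
  if (!prefix) return []

  // Keep releases on the same line and take the two most recent entries.
  return releases.filter(r => r.tag_name.match(prefix)?.[0]).slice(0, 2)
}

const releases = [
  { tag_name: 'v3.0.2' },
  { tag_name: 'v3.0.1' },
  { tag_name: 'v2.0.2' },
  { tag_name: 'v2.0.1' },
]

// -> [{ tag_name: 'v2.0.2' }, { tag_name: 'v2.0.1' }]
console.log(filterReleasesByTag(releases, 'v2.0.2', 'v\\d'))
```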
34199 .github/actions/release-commenter/dist/index.js (vendored, Normal file)
File diff suppressed because one or more lines are too long
7 .github/actions/release-commenter/jest.config.js (vendored, Normal file)

@@ -0,0 +1,7 @@
module.exports = {
  testEnvironment: 'node',
  testPathIgnorePatterns: ['/node_modules/', '<rootDir>/dist/'],
  transform: {
    '^.+\\.(t|j)sx?$': ['@swc/jest'],
  },
}
34 .github/actions/release-commenter/package.json (vendored, Normal file)

@@ -0,0 +1,34 @@
{
  "name": "release-commenter",
  "version": "0.0.0",
  "description": "GitHub Action to automatically comment on PRs and Issues when a fix is released.",
  "main": "dist/index.js",
  "license": "MIT",
  "private": true,
  "scripts": {
    "clean": "rimraf dist",
    "build": "pnpm build:typecheck && pnpm build:ncc",
    "build:ncc": "ncc build src/index.ts -t -o dist",
    "build:typecheck": "tsc",
    "test": "jest"
  },
  "dependencies": {
    "@actions/core": "^1.3.0",
    "@actions/github": "^5.0.0"
  },
  "devDependencies": {
    "@octokit/webhooks-types": "^7.5.1",
    "@swc/jest": "^0.2.36",
    "@types/jest": "^27.5.2",
    "@types/node": "^20.16.5",
    "@typescript-eslint/eslint-plugin": "^4.33.0",
    "@typescript-eslint/parser": "^4.33.0",
    "@vercel/ncc": "0.38.1",
    "concurrently": "^8.2.2",
    "eslint": "^7.32.0",
    "jest": "^29.7.0",
    "prettier": "^3.3.3",
    "ts-jest": "^26.5.6",
    "typescript": "^4.9.5"
  }
}
5419 .github/actions/release-commenter/pnpm-lock.yaml (generated, vendored, Normal file)
File diff suppressed because it is too large
266 .github/actions/release-commenter/src/__snapshots__/index.test.ts.snap (vendored, Normal file)
@@ -0,0 +1,266 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`tests feature tests can apply labels 1`] = `
|
||||
[
|
||||
[
|
||||
{
|
||||
"issue_number": 123,
|
||||
"labels": [
|
||||
":dart: landed",
|
||||
"release-current_tag_name",
|
||||
"Release Name",
|
||||
],
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
"issue_number": 7,
|
||||
"labels": [
|
||||
":dart: landed",
|
||||
"release-current_tag_name",
|
||||
"Release Name",
|
||||
],
|
||||
},
|
||||
],
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`tests main test 1`] = `
|
||||
{
|
||||
"graphql": [MockFunction] {
|
||||
"calls": [
|
||||
[
|
||||
"
|
||||
{
|
||||
resource(url: "http://repository/commit/SHA1") {
|
||||
... on Commit {
|
||||
messageHeadlineHTML
|
||||
messageBodyHTML
|
||||
associatedPullRequests(first: 10) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
edges {
|
||||
node {
|
||||
bodyHTML
|
||||
number
|
||||
state
|
||||
labels(first: 10) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
timelineItems(itemTypes: [CONNECTED_EVENT, DISCONNECTED_EVENT], first: 100) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
nodes {
|
||||
... on ConnectedEvent {
|
||||
__typename
|
||||
isCrossRepository
|
||||
subject {
|
||||
... on Issue {
|
||||
number
|
||||
}
|
||||
}
|
||||
}
|
||||
... on DisconnectedEvent {
|
||||
__typename
|
||||
isCrossRepository
|
||||
subject {
|
||||
... on Issue {
|
||||
number
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
",
|
||||
],
|
||||
[
|
||||
"
|
||||
{
|
||||
resource(url: "http://repository/commit/SHA2") {
|
||||
... on Commit {
|
||||
messageHeadlineHTML
|
||||
messageBodyHTML
|
||||
associatedPullRequests(first: 10) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
edges {
|
||||
node {
|
||||
bodyHTML
|
||||
number
|
||||
state
|
||||
labels(first: 10) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
timelineItems(itemTypes: [CONNECTED_EVENT, DISCONNECTED_EVENT], first: 100) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
nodes {
|
||||
... on ConnectedEvent {
|
||||
__typename
|
||||
isCrossRepository
|
||||
subject {
|
||||
... on Issue {
|
||||
number
|
||||
}
|
||||
}
|
||||
}
|
||||
... on DisconnectedEvent {
|
||||
__typename
|
||||
isCrossRepository
|
||||
subject {
|
||||
... on Issue {
|
||||
number
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
",
|
||||
],
|
||||
],
|
||||
"results": [
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
},
|
||||
"rest": {
|
||||
"issues": {
|
||||
"addLabels": [MockFunction],
|
||||
"createComment": [MockFunction] {
|
||||
"calls": [
|
||||
[
|
||||
{
|
||||
"body": "Included in release [current_tag_name](http://current_release). Replacements: current_tag_name, current_tag_name.",
|
||||
"issue_number": 3,
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
"body": "Included in release [current_tag_name](http://current_release). Replacements: current_tag_name, current_tag_name.",
|
||||
"issue_number": 123,
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
"body": "Included in release [current_tag_name](http://current_release). Replacements: current_tag_name, current_tag_name.",
|
||||
"issue_number": 7,
|
||||
},
|
||||
],
|
||||
],
|
||||
"results": [
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
},
|
||||
"get": [MockFunction] {
|
||||
"calls": [
|
||||
[
|
||||
{
|
||||
"issue_number": 3,
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
"issue_number": 123,
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
"issue_number": 7,
|
||||
},
|
||||
],
|
||||
],
|
||||
"results": [
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
"repos": {
|
||||
"compareCommits": [MockFunction] {
|
||||
"calls": [
|
||||
[
|
||||
{
|
||||
"base": "prior_tag_name",
|
||||
"head": "current_tag_name",
|
||||
},
|
||||
],
|
||||
],
|
||||
"results": [
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
},
|
||||
"listReleases": [MockFunction] {
|
||||
"calls": [
|
||||
[
|
||||
{
|
||||
"per_page": 100,
|
||||
},
|
||||
],
|
||||
],
|
||||
"results": [
|
||||
{
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
`;
|
||||
399 .github/actions/release-commenter/src/index.test.ts (vendored, Normal file)
@@ -0,0 +1,399 @@
|
||||
import type * as githubModule from '@actions/github'
|
||||
import type * as coreModule from '@actions/core'
|
||||
import { mock } from 'node:test'
|
||||
|
||||
jest.mock('@actions/core')
|
||||
jest.mock('@actions/github')
|
||||
|
||||
type Mocked<T> = {
|
||||
-readonly [P in keyof T]: T[P] extends Function ? jest.Mock<T[P]> : jest.Mocked<Partial<T[P]>>
|
||||
}
|
||||
|
||||
const github = require('@actions/github') as jest.Mocked<Mocked<typeof githubModule>>
|
||||
const core = require('@actions/core') as jest.Mocked<Mocked<typeof coreModule>>
|
||||
|
||||
describe('tests', () => {
|
||||
let mockOctokit: any = {}
|
||||
let currentTag: string = 'current_tag_name'
|
||||
|
||||
;(core.warning as any) = jest.fn(console.warn.bind(console))
|
||||
;(core.error as any) = jest.fn(console.error.bind(console))
|
||||
|
||||
let commentTempate: string = ''
|
||||
let labelTemplate: string | null = null
|
||||
const skipLabelTemplate: string | null = 'skip,test'
|
||||
let tagFilter: string | RegExp | null = null
|
||||
|
||||
let simpleMockOctokit: any = {}
|
||||
|
||||
beforeEach(() => {
|
||||
tagFilter = null
|
||||
currentTag = 'current_tag_name'
|
||||
;(github.context as any) = {
|
||||
payload: {
|
||||
repo: {
|
||||
owner: 'owner',
|
||||
repo: 'repo',
|
||||
},
|
||||
release: {
|
||||
tag_name: currentTag,
|
||||
},
|
||||
repository: { html_url: 'http://repository' },
|
||||
},
|
||||
}
|
||||
|
||||
github.getOctokit.mockReset().mockImplementationOnce(((token: string) => {
|
||||
expect(token).toBe('GITHUB_TOKEN_VALUE')
|
||||
return mockOctokit
|
||||
}) as any)
|
||||
;(core.getInput as any).mockImplementation((key: string) => {
|
||||
if (key == 'GITHUB_TOKEN') {
|
||||
return 'GITHUB_TOKEN_VALUE'
|
||||
}
|
||||
if (key == 'comment-template') {
|
||||
return commentTempate
|
||||
}
|
||||
if (key == 'label-template') {
|
||||
return labelTemplate
|
||||
}
|
||||
if (key == 'skip-label') {
|
||||
return skipLabelTemplate
|
||||
}
|
||||
if (key == 'tag-filter') {
|
||||
return tagFilter
|
||||
}
|
||||
fail(`Unexpected input key ${key}`)
|
||||
})
|
||||
|
||||
commentTempate =
|
||||
'Included in release {release_link}. Replacements: {release_name}, {release_tag}.'
|
||||
labelTemplate = null
|
||||
simpleMockOctokit = {
|
||||
rest: {
|
||||
issues: {
|
||||
get: jest.fn(() => Promise.resolve({ data: { locked: false } })),
|
||||
createComment: jest.fn(() => Promise.resolve()),
|
||||
addLabels: jest.fn(() => Promise.resolve()),
|
||||
},
|
||||
repos: {
|
||||
listReleases: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
data: [
|
||||
{
|
||||
name: 'Release Name',
|
||||
tag_name: 'current_tag_name',
|
||||
html_url: 'http://current_release',
|
||||
},
|
||||
{
|
||||
tag_name: 'prior_tag_name',
|
||||
html_url: 'http://prior_release',
|
||||
},
|
||||
],
|
||||
}),
|
||||
),
|
||||
compareCommits: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
data: { commits: [{ sha: 'SHA1' }] },
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
graphql: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
resource: {
|
||||
messageHeadlineHTML: '',
|
||||
messageBodyHTML:
|
||||
'<span class="issue-keyword tooltipped tooltipped-se" aria-label="This commit closes issue #123.">Closes</span> <p><span class="issue-keyword tooltipped tooltipped-se" aria-label="This pull request closes issue #7.">Closes</span>',
|
||||
associatedPullRequests: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
edges: [],
|
||||
},
|
||||
},
|
||||
}),
|
||||
),
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
expect(core.error).not.toHaveBeenCalled()
|
||||
expect(core.warning).not.toHaveBeenCalled()
|
||||
expect(core.setFailed).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
test('main test', async () => {
|
||||
mockOctokit = {
|
||||
...simpleMockOctokit,
|
||||
rest: {
|
||||
issues: {
|
||||
get: jest.fn(() => Promise.resolve({ data: { locked: false } })),
|
||||
createComment: jest.fn(() => Promise.resolve()),
|
||||
addLabels: jest.fn(() => Promise.resolve()),
|
||||
},
|
||||
repos: {
|
||||
listReleases: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
data: [
|
||||
{
|
||||
tag_name: 'current_tag_name',
|
||||
html_url: 'http://current_release',
|
||||
},
|
||||
{
|
||||
tag_name: 'prior_tag_name',
|
||||
html_url: 'http://prior_release',
|
||||
},
|
||||
],
|
||||
}),
|
||||
),
|
||||
compareCommits: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
data: { commits: [{ sha: 'SHA1' }, { sha: 'SHA2' }] },
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
graphql: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
resource: {
|
||||
messageHeadlineHTML:
|
||||
'<span class="issue-keyword tooltipped tooltipped-se" aria-label="This commit closes issue #3.">Closes</span> <a class="issue-link js-issue-link" data-error-text="Failed to load title" data-id="718013420" data-permission-text="Title is private" data-url="https://github.com/apexskier/github-release-commenter/issues/1" data-hovercard-type="issue" data-hovercard-url="/apexskier/github-release-commenter/issues/1/hovercard" href="https://github.com/apexskier/github-release-commenter/issues/1">#1</a>',
|
||||
messageBodyHTML:
|
||||
'<span class="issue-keyword tooltipped tooltipped-se" aria-label="This commit closes issue #123.">Closes</span> <p><span class="issue-keyword tooltipped tooltipped-se" aria-label="This pull request closes issue #7.">Closes</span>',
|
||||
associatedPullRequests: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
bodyHTML:
|
||||
'<span class="issue-keyword tooltipped tooltipped-se" aria-label="This commit closes issue #4.">Closes</span> <span class="issue-keyword tooltipped tooltipped-se" aria-label="This commit closes issue #5.">Closes</span>',
|
||||
number: 9,
|
||||
labels: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
nodes: [{ name: 'label1' }, { name: 'label2' }],
|
||||
},
|
||||
timelineItems: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
nodes: [
|
||||
{
|
||||
isCrossRepository: true,
|
||||
__typename: 'ConnectedEvent',
|
||||
subject: { number: 1 },
|
||||
},
|
||||
{
|
||||
isCrossRepository: false,
|
||||
__typename: 'ConnectedEvent',
|
||||
subject: { number: 2 },
|
||||
},
|
||||
{
|
||||
isCrossRepository: false,
|
||||
__typename: 'DisconnectedEvent',
|
||||
subject: { number: 2 },
|
||||
},
|
||||
{
|
||||
isCrossRepository: false,
|
||||
__typename: 'ConnectedEvent',
|
||||
subject: { number: 2 },
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
node: {
|
||||
bodyHTML: '',
|
||||
number: 42,
|
||||
labels: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
nodes: [{ name: 'label1' }, { name: 'skip' }],
|
||||
},
|
||||
timelineItems: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
nodes: [
|
||||
{
|
||||
isCrossRepository: true,
|
||||
__typename: 'ConnectedEvent',
|
||||
subject: { number: 82 },
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
jest.isolateModules(() => {
|
||||
require('./index')
|
||||
})
|
||||
|
||||
await new Promise<void>(setImmediate)
|
||||
|
||||
expect(mockOctokit).toMatchSnapshot()
|
||||
expect(mockOctokit.rest.issues.createComment).toHaveBeenCalledTimes(3)
|
||||
})
|
||||
|
||||
describe('can filter tags', () => {
|
||||
const v3prev = 'v3.0.1'
|
||||
const v3current = 'v3.0.2'
|
||||
const v2prev = 'v2.0.1'
|
||||
const v2current = 'v2.0.2'
|
||||
|
||||
const listReleasesData = [
|
||||
{
|
||||
name: 'Current Release Name',
|
||||
tag_name: v3current,
|
||||
html_url: 'http://v3.0.2',
|
||||
},
|
||||
{
|
||||
name: 'Prev Release Name',
|
||||
tag_name: v3prev,
|
||||
html_url: 'http://v3.0.1',
|
||||
},
|
||||
{
|
||||
name: 'v2 Current Release Name',
|
||||
tag_name: v2current,
|
||||
html_url: 'http://v2.0.2',
|
||||
},
|
||||
{
|
||||
name: 'v2 Prev Release Name',
|
||||
tag_name: v2prev,
|
||||
html_url: 'http://v2.0.1',
|
||||
},
|
||||
]
|
||||
|
||||
it.each`
|
||||
description | prevTag | currentTag | filter
|
||||
${'no filter'} | ${v3prev} | ${v3current} | ${null}
|
||||
${'v3'} | ${v3prev} | ${v3current} | ${'v\\d'}
|
||||
${'v2'} | ${v2prev} | ${v2current} | ${'v\\d'}
|
||||
`('should filter tags with $description', async ({ prevTag, currentTag, filter }) => {
|
||||
// @ts-ignore
|
||||
github.context.payload.release.tag_name = currentTag
|
||||
|
||||
tagFilter = filter
|
||||
|
||||
mockOctokit = {
|
||||
...simpleMockOctokit,
|
||||
rest: {
|
||||
issues: {
|
||||
get: jest.fn(() => Promise.resolve({ data: { locked: false } })),
|
||||
createComment: jest.fn(() => Promise.resolve()),
|
||||
addLabels: jest.fn(() => Promise.resolve()),
|
||||
},
|
||||
repos: {
|
||||
listReleases: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
data: listReleasesData,
|
||||
}),
|
||||
),
|
||||
compareCommits: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
data: { commits: [{ sha: 'SHA1' }] },
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
graphql: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
resource: {
|
||||
messageHeadlineHTML: '',
|
||||
messageBodyHTML:
|
||||
'<span class="issue-keyword tooltipped tooltipped-se" aria-label="This commit closes issue #123.">Closes</span> <p><span class="issue-keyword tooltipped tooltipped-se" aria-label="This pull request closes issue #7.">Closes</span>',
|
||||
associatedPullRequests: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
edges: [],
|
||||
},
|
||||
},
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
jest.isolateModules(() => {
|
||||
require('./index')
|
||||
})
|
||||
|
||||
await new Promise<void>(resolve => setImmediate(() => resolve()))
|
||||
|
||||
expect(github.getOctokit).toHaveBeenCalled()
|
||||
expect(mockOctokit.rest.repos.compareCommits.mock.calls).toEqual([
|
||||
[{ base: prevTag, head: currentTag }],
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('feature tests', () => {
|
||||
beforeEach(() => {
|
||||
mockOctokit = simpleMockOctokit
|
||||
})
|
||||
|
||||
it('can disable comments', async () => {
|
||||
commentTempate = ''
|
||||
|
||||
jest.isolateModules(() => {
|
||||
require('./index')
|
||||
})
|
||||
|
||||
await new Promise<void>(resolve => setImmediate(() => resolve()))
|
||||
|
||||
expect(github.getOctokit).toHaveBeenCalled()
|
||||
expect(mockOctokit.rest.issues.createComment).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should unlock and comment', async () => {
|
||||
mockOctokit = {
|
||||
...simpleMockOctokit,
|
||||
rest: {
|
||||
...simpleMockOctokit.rest,
|
||||
issues: {
|
||||
// Return locked for both issues to be commented on
|
||||
get: jest.fn(() => Promise.resolve({ data: { locked: true } })),
|
||||
lock: jest.fn(() => Promise.resolve()),
|
||||
unlock: jest.fn(() => Promise.resolve()),
|
||||
createComment: jest.fn(() => Promise.resolve()),
|
||||
},
|
||||
},
|
||||
graphql: jest.fn(() =>
|
||||
Promise.resolve({
|
||||
resource: {
|
||||
messageHeadlineHTML: '',
|
||||
messageBodyHTML:
|
||||
'<span class="issue-keyword tooltipped tooltipped-se" aria-label="This commit closes issue #123.">Closes</span> <p><span class="issue-keyword tooltipped tooltipped-se" aria-label="This pull request closes issue #7.">Closes</span>',
|
||||
associatedPullRequests: {
|
||||
pageInfo: { hasNextPage: false },
|
||||
edges: [],
|
||||
},
|
||||
},
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
jest.isolateModules(() => {
|
||||
require('./index')
|
||||
})
|
||||
|
||||
await new Promise<void>(resolve => setImmediate(() => resolve()))
|
||||
|
||||
expect(github.getOctokit).toHaveBeenCalled()
|
||||
|
||||
// Should call once for both linked issues
|
||||
expect(mockOctokit.rest.issues.unlock).toHaveBeenCalledTimes(2)
|
||||
expect(mockOctokit.rest.issues.createComment).toHaveBeenCalledTimes(2)
|
||||
expect(mockOctokit.rest.issues.lock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it.skip('can apply labels', async () => {
|
||||
labelTemplate = ':dart: landed,release-{release_tag},{release_name}'
|
||||
|
||||
jest.isolateModules(() => {
|
||||
require('./index')
|
||||
})
|
||||
|
||||
await new Promise<void>(resolve => setImmediate(() => resolve()))
|
||||
|
||||
expect(github.getOctokit).toHaveBeenCalled()
|
||||
expect(mockOctokit.rest.issues.addLabels.mock.calls).toMatchSnapshot()
|
||||
})
|
||||
})
|
||||
})
|
||||
349 .github/actions/release-commenter/src/index.ts (vendored, Normal file)
@@ -0,0 +1,349 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as github from '@actions/github'
|
||||
import type * as Webhooks from '@octokit/webhooks-types'
|
||||
|
||||
const closesMatcher = /aria-label="This (?:commit|pull request) closes issue #(\d+)\."/g
|
||||
|
||||
const releaseLinkTemplateRegex = /{release_link}/g
|
||||
const releaseNameTemplateRegex = /{release_name}/g
|
||||
const releaseTagTemplateRegex = /{release_tag}/g
|
||||
|
||||
;(async function main() {
|
||||
try {
|
||||
const payload = github.context.payload as Webhooks.EventPayloadMap['release']
|
||||
|
||||
const githubToken = core.getInput('GITHUB_TOKEN')
|
||||
const tagFilter = core.getInput('tag-filter') || undefined // Accept tag filter as an input
|
||||
const octokit = github.getOctokit(githubToken)
|
||||
|
||||
const commentTemplate = core.getInput('comment-template')
|
||||
const labelTemplate = core.getInput('label-template') || null
|
||||
const skipLabelTemplate = core.getInput('skip-label') || null
|
||||
|
||||
// Fetch the releases with the optional tag filter applied
|
||||
const { data: rawReleases } = await octokit.rest.repos.listReleases({
|
||||
...github.context.repo,
|
||||
per_page: 100,
|
||||
})
|
||||
|
||||
// Get the current release tag or latest tag
|
||||
const currentTag = payload?.release?.tag_name || rawReleases?.[0]?.tag_name
|
||||
|
||||
let releases = rawReleases
|
||||
|
||||
// Filter releases by the tag filter if provided
|
||||
if (tagFilter) {
|
||||
core.info(`Filtering releases by tag filter: ${tagFilter}`)
|
||||
// Get the matching part of the current release tag
|
||||
const regexMatch = currentTag.match(tagFilter)?.[0]
|
||||
if (!regexMatch) {
|
||||
core.error(`Current release tag ${currentTag} does not match the tag filter ${tagFilter}`)
|
||||
return
|
||||
}
|
||||
|
||||
core.info(`Matched string from filter: ${regexMatch}`)
|
||||
|
||||
releases = releases
|
||||
.filter(release => {
|
||||
const match = release.tag_name.match(regexMatch)?.[0]
|
||||
return match
|
||||
})
|
||||
.slice(0, 2)
|
||||
}
|
||||
|
||||
core.info(`Releases: ${JSON.stringify(releases, null, 2)}`)
|
||||
|
||||
if (releases.length < 2) {
|
||||
if (!releases.length) {
|
||||
core.error(`No releases found with the provided tag filter: '${tagFilter}'`)
|
||||
return
|
||||
}
|
||||
|
||||
core.info('first release')
|
||||
return
|
||||
}
|
||||
|
||||
const [currentRelease, priorRelease] = releases
|
||||
|
||||
core.info(`${priorRelease.tag_name}...${currentRelease.tag_name}`)
|
||||
|
||||
const {
|
||||
data: { commits },
|
||||
} = await octokit.rest.repos.compareCommits({
|
||||
...github.context.repo,
|
||||
base: priorRelease.tag_name,
|
||||
head: currentRelease.tag_name,
|
||||
})
|
||||
|
||||
if (!currentRelease.name) {
|
||||
core.info('Current release has no name, will fall back to the tag name.')
|
||||
}
|
||||
const releaseLabel = currentRelease.name || currentRelease.tag_name
|
||||
|
||||
const comment = commentTemplate
|
||||
.trim()
|
||||
.split(releaseLinkTemplateRegex)
|
||||
.join(`[${releaseLabel}](${currentRelease.html_url})`)
|
||||
.split(releaseNameTemplateRegex)
|
||||
.join(releaseLabel)
|
||||
.split(releaseTagTemplateRegex)
|
||||
.join(currentRelease.tag_name)
|
||||
|
||||
const parseLabels = (rawInput: string | null) =>
|
||||
rawInput
|
||||
?.split(releaseNameTemplateRegex)
|
||||
.join(releaseLabel)
|
||||
?.split(releaseTagTemplateRegex)
|
||||
.join(currentRelease.tag_name)
|
||||
?.split(',')
|
||||
?.map(l => l.trim())
|
||||
.filter(l => l)
|
||||
|
||||
const labels = parseLabels(labelTemplate)
|
||||
const skipLabels = parseLabels(skipLabelTemplate)
|
||||
|
||||
const linkedIssuesPrs = new Set<number>()
|
||||
|
||||
await Promise.all(
|
||||
commits.map(commit =>
|
||||
(async () => {
|
||||
const query = `
|
||||
{
|
||||
resource(url: "${payload.repository.html_url}/commit/${commit.sha}") {
|
||||
... on Commit {
|
||||
messageHeadlineHTML
|
||||
messageBodyHTML
|
||||
associatedPullRequests(first: 10) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
edges {
|
||||
node {
|
||||
bodyHTML
|
||||
number
|
||||
state
|
||||
labels(first: 10) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
timelineItems(itemTypes: [CONNECTED_EVENT, DISCONNECTED_EVENT], first: 100) {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
}
|
||||
nodes {
|
||||
... on ConnectedEvent {
|
||||
__typename
|
||||
isCrossRepository
|
||||
subject {
|
||||
... on Issue {
|
||||
number
|
||||
}
|
||||
}
|
||||
}
|
||||
... on DisconnectedEvent {
|
||||
__typename
|
||||
isCrossRepository
|
||||
subject {
|
||||
... on Issue {
|
||||
number
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
const response: {
|
||||
resource: null | {
|
||||
messageHeadlineHTML: string
|
||||
messageBodyHTML: string
|
||||
associatedPullRequests: {
|
||||
pageInfo: { hasNextPage: boolean }
|
||||
edges: ReadonlyArray<{
|
||||
node: {
|
||||
bodyHTML: string
|
||||
number: number
|
||||
state: 'OPEN' | 'CLOSED' | 'MERGED'
|
||||
labels: {
|
||||
pageInfo: { hasNextPage: boolean }
|
||||
nodes: ReadonlyArray<{
|
||||
name: string
|
||||
}>
|
||||
}
|
||||
timelineItems: {
|
||||
pageInfo: { hasNextPage: boolean }
|
||||
nodes: ReadonlyArray<{
|
||||
__typename: 'ConnectedEvent' | 'DisconnectedEvent'
|
||||
isCrossRepository: boolean
|
||||
subject: {
|
||||
number: number
|
||||
}
|
||||
}>
|
||||
}
|
||||
}
|
||||
}>
|
||||
}
|
||||
}
|
||||
} = await octokit.graphql(query)
|
||||
|
||||
if (!response.resource) {
|
||||
return
|
||||
}
|
||||
|
||||
// core.info(JSON.stringify(response.resource, null, 2))
|
||||
|
||||
core.info(`Checking commit: ${payload.repository.html_url}/commit/${commit.sha}`)
|
||||
|
||||
const associatedClosedPREdges = response.resource.associatedPullRequests.edges.filter(
|
||||
e => e.node.state === 'MERGED',
|
||||
)
|
||||
|
||||
if (associatedClosedPREdges.length) {
|
||||
core.info(
|
||||
` Associated Merged PRs:\n ${associatedClosedPREdges.map(pr => `${payload.repository.html_url}/pull/${pr.node.number}`).join('\n ')}`,
|
||||
)
|
||||
} else {
|
||||
core.info(' No associated merged PRs')
|
||||
}
|
||||
|
||||
const html = [
|
||||
response.resource.messageHeadlineHTML,
|
||||
response.resource.messageBodyHTML,
|
||||
...associatedClosedPREdges.map(pr => pr.node.bodyHTML),
|
||||
].join(' ')
|
||||
|
||||
for (const match of html.matchAll(closesMatcher)) {
|
||||
const [, num] = match
|
||||
linkedIssuesPrs.add(parseInt(num, 10))
|
||||
core.info(
|
||||
` Linked issue/PR from closesMatcher: ${payload.repository.html_url}/pull/${num}`,
|
||||
)
|
||||
}
|
||||
|
||||
if (response.resource.associatedPullRequests.pageInfo.hasNextPage) {
|
||||
core.warning(`Too many PRs associated with ${commit.sha}`)
|
||||
}
|
||||
|
||||
const seen = new Set<number>()
|
||||
for (const associatedPR of associatedClosedPREdges) {
|
||||
if (associatedPR.node.timelineItems.pageInfo.hasNextPage) {
|
||||
core.warning(`Too many links for #${associatedPR.node.number}`)
|
||||
}
|
||||
if (associatedPR.node.labels.pageInfo.hasNextPage) {
|
||||
core.warning(`Too many labels for #${associatedPR.node.number}`)
|
||||
}
|
||||
// a skip labels is present on this PR
|
||||
if (
|
||||
skipLabels?.some(l => associatedPR.node.labels.nodes.some(({ name }) => name === l))
|
||||
) {
|
||||
continue
|
||||
}
|
||||
|
||||
linkedIssuesPrs.add(associatedPR.node.number)
|
||||
core.info(
|
||||
` Linked issue/PR from associated PR: ${payload.repository.html_url}/pull/${associatedPR.node.number}`,
|
||||
)
|
||||
|
||||
// These are sorted by creation date in ascending order. The latest event for a given issue/PR is all we need
|
||||
// ignore links that aren't part of this repo
|
||||
const links = associatedPR.node.timelineItems.nodes
|
||||
.filter(node => !node.isCrossRepository)
|
||||
.reverse()
|
||||
for (const link of links) {
|
||||
if (seen.has(link.subject.number)) {
|
||||
continue
|
||||
}
|
||||
if (link.__typename == 'ConnectedEvent') {
|
||||
linkedIssuesPrs.add(link.subject.number)
|
||||
core.info(
|
||||
`Linked issue/PR from connected event: ${payload.repository.html_url}/pull/${link.subject.number}`,
|
||||
)
|
||||
}
|
||||
seen.add(link.subject.number)
|
||||
}
|
||||
}
|
||||
})(),
|
||||
),
|
||||
)
|
||||
|
||||
core.info(
|
||||
`Final issues/PRs to be commented on: \n${Array.from(linkedIssuesPrs)
|
||||
.map(num => ` ${payload.repository.html_url}/pull/${num}`)
|
||||
.join('\n')}`,
|
||||
)
|
||||
|
||||
const requests: Array<Promise<unknown>> = []
|
||||
for (const issueNumber of linkedIssuesPrs) {
|
||||
const baseRequest = {
|
||||
...github.context.repo,
|
||||
issue_number: issueNumber,
|
||||
}
|
||||
if (comment) {
|
||||
const commentRequest = {
|
||||
...baseRequest,
|
||||
body: comment,
|
||||
}
|
||||
|
||||
// Check if issue is locked or not
|
||||
const { data: issue } = await octokit.rest.issues.get(baseRequest)
|
||||
|
||||
let createCommentPromise: () => Promise<void>
|
||||
if (!issue.locked) {
|
||||
createCommentPromise = async () => {
|
||||
try {
|
||||
await octokit.rest.issues.createComment(commentRequest)
|
||||
} catch (error) {
|
||||
core.error(error as Error)
|
||||
core.error(
|
||||
`Failed to comment on issue/PR: ${issueNumber}. ${payload.repository.html_url}/pull/${issueNumber}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
core.info(
|
||||
`Issue/PR is locked: ${issueNumber}. Unlocking, commenting, and re-locking. ${payload.repository.html_url}/pull/${issueNumber}`,
|
||||
)
|
||||
createCommentPromise = async () => {
|
||||
try {
|
||||
core.debug(`Unlocking issue/PR: ${issueNumber}`)
|
||||
await octokit.rest.issues.unlock(baseRequest)
|
||||
core.debug(`Commenting on issue/PR: ${issueNumber}`)
|
||||
await octokit.rest.issues.createComment(commentRequest)
|
||||
core.debug(`Re-locking issue/PR: ${issueNumber}`)
|
||||
await octokit.rest.issues.lock(baseRequest)
|
||||
} catch (error) {
|
||||
core.error(error as Error)
|
||||
core.error(
|
||||
`Failed to unlock, comment, and re-lock issue/PR: ${issueNumber}. ${payload.repository.html_url}/pull/${issueNumber}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
requests.push(createCommentPromise())
|
||||
}
|
||||
if (labels) {
|
||||
const request = {
|
||||
...baseRequest,
|
||||
labels,
|
||||
}
|
||||
// core.info(JSON.stringify(request, null, 2))
|
||||
requests.push(octokit.rest.issues.addLabels(request))
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(requests)
|
||||
} catch (error) {
|
||||
core.error(error as Error)
|
||||
core.setFailed((error as Error).message)
|
||||
}
|
||||
})()
|
||||
15 .github/actions/release-commenter/tsconfig.json (vendored, Normal file)

@@ -0,0 +1,15 @@
{
  "compilerOptions": {
    "target": "es5",
    "lib": ["es2020.string"],
    "noEmit": true,
    "strict": true,
    "noUnusedLocals": true,
    "noImplicitReturns": true,
    "noFallthroughCasesInSwitch": true,
    "forceConsistentCasingInFileNames": true,
    "downlevelIteration": true,
    "skipLibCheck": true,
  },
  "exclude": ["src/**/*.test.ts"]
}
74 .github/dependabot.yml (vendored, Normal file)

@@ -0,0 +1,74 @@
# docs: https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: github-actions
    directories:
      - /
      - /.github/workflows
      - /.github/actions/* # Not working until resolved: https://github.com/dependabot/dependabot-core/issues/6345
      - /.github/actions/setup
    target-branch: beta
    schedule:
      interval: monthly
      timezone: America/Detroit
      time: '06:00'
    groups:
      github_actions:
        patterns:
          - '*'

  - package-ecosystem: npm
    directory: /
    target-branch: beta
    schedule:
      interval: weekly
      day: sunday
      timezone: America/Detroit
      time: '06:00'
    commit-message:
      prefix: 'chore(deps)'
    labels:
      - dependencies
    groups:
      production-deps:
        dependency-type: production
        update-types:
          - minor
          - patch
        patterns:
          - '*'
        exclude-patterns:
          - 'drizzle*'
      dev-deps:
        dependency-type: development
        update-types:
          - minor
          - patch
        patterns:
          - '*'
        exclude-patterns:
          - 'drizzle*'

  # Only bump patch versions for 2.x
  - package-ecosystem: npm
    directory: /
    target-branch: main
    schedule:
      interval: weekly
      day: sunday
      timezone: America/Detroit
      time: '06:00'
    commit-message:
      prefix: 'chore(deps)'
    labels:
      - dependencies
    groups:
      production-deps:
        dependency-type: production
        update-types:
          - patch
        patterns:
          - '*'
        exclude-patterns:
          - 'drizzle*'
3977 .github/pnpm-lock.yaml (generated, vendored, Normal file)
File diff suppressed because it is too large
2 .github/pnpm-workspace.yaml (vendored, Normal file)

@@ -0,0 +1,2 @@
packages:
  - 'actions/*'
26 .github/workflows/lock-issues.yml (vendored, Normal file)

@@ -0,0 +1,26 @@
name: lock-issues

on:
  schedule:
    # Run nightly at 12am EST
    - cron: '0 4 * * *'
  workflow_dispatch:

permissions:
  issues: write

jobs:
  lock_issues:
    runs-on: ubuntu-latest
    steps:
      - name: Lock issues
        uses: dessant/lock-threads@v5
        with:
          process-only: 'issues'
          issue-inactive-days: '1'
          exclude-any-issue-labels: 'status: awaiting-reply'
          log-output: true
          issue-comment: >
            This issue has been automatically locked.

            Please open a new issue if this issue persists with any additional detail.
14 .github/workflows/main.yml (vendored)

@@ -27,7 +27,7 @@ jobs:
        with:
          filters: |
            needs_build:
              - '.github/workflows/**'
              - '.github/workflows/main.yml'
              - 'packages/**'
              - 'test/**'
              - 'pnpm-lock.yaml'
@@ -61,7 +61,7 @@
      - name: Install pnpm
        uses: pnpm/action-setup@v3
        with:
          version: 8
          version: 9.7.0
          run_install: false

      - name: Get pnpm store directory
@@ -116,7 +116,7 @@
      - name: Install pnpm
        uses: pnpm/action-setup@v3
        with:
          version: 8
          version: 9.7.0
          run_install: false

      - name: Restore build
@@ -201,7 +201,7 @@
      - name: Install pnpm
        uses: pnpm/action-setup@v3
        with:
          version: 8
          version: 9.7.0
          run_install: false

      - name: Restore build
@@ -242,7 +242,7 @@
      - name: Install pnpm
        uses: pnpm/action-setup@v3
        with:
          version: 8
          version: 9.7.0
          run_install: false

      - name: Restore build
@@ -286,7 +286,7 @@
      - name: Install pnpm
        uses: pnpm/action-setup@v3
        with:
          version: 8
          version: 9.7.0
          run_install: false

      - name: Restore build
@@ -327,7 +327,7 @@
      - name: Install pnpm
        uses: pnpm/action-setup@v3
        with:
          version: 8
          version: 9.7.0
          run_install: false

      - name: Restore build
32 .github/workflows/post-release.yml (vendored, Normal file)

@@ -0,0 +1,32 @@
name: post-release

on:
  release:
    types:
      - published
  workflow_dispatch:

jobs:
  post_release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          # Only needed if debugging on a branch other than default
          # ref: ${{ github.event.release.target_commitish || github.ref }}
      - run: echo "npm_version=$(npm pkg get version | tr -d '"')" >> "$GITHUB_ENV"
      - uses: ./.github/actions/release-commenter
        continue-on-error: true
        env:
          ACTIONS_STEP_DEBUG: true
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          tag-filter: 'v\d'

          # Change to blank to disable commenting
          # comment-template: ''

          comment-template: |
            🚀 This is included in version {release_link}
4 .gitignore (vendored)

@@ -4,6 +4,9 @@ dist
/.idea/*
!/.idea/runConfigurations

# Custom actions
!.github/actions/**/dist

test-results
.devcontainer
.localstack
@@ -134,7 +137,6 @@ out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
54 CHANGELOG.md

@@ -1,3 +1,57 @@
## [2.30.1](https://github.com/payloadcms/payload/compare/v2.30.0...v2.30.1) (2024-10-02)


### Bug Fixes

* **db-mongodb:** properly filters out `number` field values with the `exists` operator filter ([#8415](https://github.com/payloadcms/payload/issues/8415)) ([0586f23](https://github.com/payloadcms/payload/commit/0586f236bbf04163a0d9b226772849cb3d977864)), closes [#8181](https://github.com/payloadcms/payload/issues/8181)
* sorting by id incorrectly orders by version.id ([#8450](https://github.com/payloadcms/payload/issues/8450)) ([1d38e6d](https://github.com/payloadcms/payload/commit/1d38e6d5d5b56a91aa8f59a461d40f28b1750f8c))

## [2.30.0](https://github.com/payloadcms/payload/compare/v2.29.0...v2.30.0) (2024-09-27)

* export toast from react toastify in payload ([#8438](https://github.com/payloadcms/payload/issues/8438)) ([17fc2d1](https://github.com/payloadcms/payload/commit/17fc2d13d06b6de01f839c27fd706bc0d6a185eb))

## [2.29.0](https://github.com/payloadcms/payload/compare/v2.28.0...v2.29.0) (2024-09-25)


### Features

* add new option to disable JOI validation ([#8067](https://github.com/payloadcms/payload/issues/8067)) ([28a0650](https://github.com/payloadcms/payload/commit/28a065072fcad2dc768e44d79609eb5ab8a3fdfd))


### Bug Fixes

* **db-postgres:** localized items in arrays with versions ([#8334](https://github.com/payloadcms/payload/issues/8334)) ([c86526b](https://github.com/payloadcms/payload/commit/c86526b5c81ff484e66fbe6e7c727fdcc1f93c77))
* **db-postgres:** querying on array within a relationship field ([#8153](https://github.com/payloadcms/payload/issues/8153)) ([170ea5b](https://github.com/payloadcms/payload/commit/170ea5badcff154514b8166ac92177d89a3fa5f8))
* **db-postgres:** sanitize tab/group path for table name ([#8010](https://github.com/payloadcms/payload/issues/8010)) ([ba7a043](https://github.com/payloadcms/payload/commit/ba7a043a99f58fad39a62ac471eeb7309a39bba0))
* treat empty strings as null / undefined for `exists` queries ([#8336](https://github.com/payloadcms/payload/issues/8336)) ([31d0b30](https://github.com/payloadcms/payload/commit/31d0b309fe5df1e37ed2a938959c1ef87834d987)), closes [#7714](https://github.com/payloadcms/payload/issues/7714)

## [2.28.0](https://github.com/payloadcms/payload/compare/v2.27.0...v2.28.0) (2024-09-04)


### Features

* collections can use custom database operations ([#7675](https://github.com/payloadcms/payload/issues/7675)) ([6ba293c](https://github.com/payloadcms/payload/commit/6ba293c0f84f91bf89cf089a20e47de130013ebb))


### Bug Fixes

* **db-postgres:** migration exit codes ([#7873](https://github.com/payloadcms/payload/issues/7873)) ([25e9bc6](https://github.com/payloadcms/payload/commit/25e9bc62dbcbabcb3619cf83e3dc0110e0a4cabf)), closes [#7031](https://github.com/payloadcms/payload/issues/7031)
* **db-postgres:** query hasMany text/number in array/blocks ([#8033](https://github.com/payloadcms/payload/issues/8033)) ([96a624a](https://github.com/payloadcms/payload/commit/96a624ad5c5259b197b4ca793d8419d1e827de9c))
* **plugin-cloud:** better logging on static handler ([#7924](https://github.com/payloadcms/payload/issues/7924)) ([1f09348](https://github.com/payloadcms/payload/commit/1f0934877ce5aabb771c936c3677a26d2ef006ec))

## [2.27.0](https://github.com/payloadcms/payload/compare/v2.26.0...v2.27.0) (2024-08-26)


### Features

* add support for custom image size file names ([#7637](https://github.com/payloadcms/payload/issues/7637)) ([f976270](https://github.com/payloadcms/payload/commit/f97627092cabe4eabbebefa75afc53579188386b))
* upgrade react-toastify dependency, and upgrade to pnpm v9 in our monorepo ([#7667](https://github.com/payloadcms/payload/issues/7667)) ([94d18e8](https://github.com/payloadcms/payload/commit/94d18e8d747588efce225cde0b621db9b513e7c1))


### Bug Fixes

* update state of field if either `valid` status or `errorMessage` changes ([#7632](https://github.com/payloadcms/payload/issues/7632)) ([c624eea](https://github.com/payloadcms/payload/commit/c624eea0d868938f4603860fa25be3df580ba7fe)), closes [#6413](https://github.com/payloadcms/payload/issues/6413)

## [2.26.0](https://github.com/payloadcms/payload/compare/v2.25.0...v2.26.0) (2024-08-09)
@@ -30,7 +30,8 @@ It's often best practice to write your Collections in separate files and then im
| **`typescript`** | An object with property `interface` as the text used in schema generation. Auto-generated from slug if not defined. |
| **`defaultSort`** | Pass a top-level field to sort by default in the collection List view. Prefix the name of the field with a minus symbol ("-") to sort in descending order. |
| **`custom`** | Extension point for adding custom data (e.g. for plugins) |
| **`dbName`** | Custom table or collection name depending on the database adapter. Auto-generated from slug if not defined.
| **`dbName`** | Custom table or collection name depending on the database adapter. Auto-generated from slug if not defined. |
| **`db`** | Set custom database operations for this Collection. [More](/docs/database/overview#collection-operations) |

_\* An asterisk denotes that a property is required._
@@ -24,8 +24,8 @@ export default buildConfig({
    // collections go here
  ],
  localization: {
    locales: ['en', 'es', 'de'],
    defaultLocale: 'en',
    locales: ['en', 'es', 'de'], // required
    defaultLocale: 'en', // required
    fallback: true,
  },
})
@@ -54,7 +54,7 @@ export default buildConfig({
        rtl: true,
      },
    ],
    defaultLocale: 'en',
    defaultLocale: 'en', // required
    fallback: true,
  },
})
@@ -87,7 +87,7 @@ export default buildConfig({
        code: 'nb',
      },
    ],
    defaultLocale: 'en',
    defaultLocale: 'en', // required
    fallback: true,
  },
})
@@ -70,4 +70,105 @@ export default buildConfig({
|
||||
}
|
||||
}),
|
||||
})
|
||||
```
|
||||
```
|
||||
|
||||
## Collection Operations
|
||||
|
||||
To configure Collection database operations in your Payload application, your Collection config has methods that can override default database operations for that Collection.
|
||||
|
||||
The override methods receive arguments useful for augmenting operations such as Field data, the collection slug, and the req.
|
||||
|
||||
Here is an example:
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload/types'
|
||||
|
||||
export const Collection: CollectionConfig => {
|
||||
return {
|
||||
slug: 'collection-db-operations',
|
||||
db: {
|
||||
// Create a document in a custom db
|
||||
create: async ({ collection, data, req }) => {
|
||||
const doc = await fetch(`https://example.com/api/${collection}/create`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify(data),
|
||||
headers: {
|
||||
'x-app-user': `payload_${req.payload.user}`,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
}).then(response => response.json())
|
||||
|
||||
return doc
|
||||
},
|
||||
|
||||
// Delete a document in a custom db
|
||||
deleteOne: async ({ collection, data, req }) => {
|
||||
const docs = await fetch(`https://example.com/api/${collection}/delete/${data.id}`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'x-app-user': `payload_${req.payload.user}`
|
||||
}
|
||||
}).then(response => response.json())
|
||||
|
||||
return docs
|
||||
},
|
||||
|
||||
// Delete many documents in a custom db
|
||||
deleteMany: async ({ collection, data, req }) => {
|
||||
const docs = await fetch(`https://example.com/api/${collection}/delete`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'x-app-user': `payload_${req.payload.user}`
|
||||
},
|
||||
body: JSON.stringify(data),
|
||||
}).then(response => response.json())
|
||||
|
||||
return docs
|
||||
},
|
||||
|
||||
// Find documents in a custom db
|
||||
find: async ({ collection, data, req, where, limit }) => {
|
||||
const docs = await fetch(`https://example.com/api/${collection}/find`, {
|
||||
method: 'POST', // added so the request can carry a body; the default GET cannot
headers: {
|
||||
'x-app-user': `payload_${req.payload.user}`
|
||||
},
|
||||
body: JSON.stringify({data, where, limit}),
|
||||
}).then(response => response.json())
|
||||
|
||||
return { docs }
|
||||
},
|
||||
|
||||
// Find one document in a custom db
|
||||
findOne: async ({ collection, data, req }) => {
|
||||
const doc = await fetch(`https://example.com/api/${collection}/find/${data.id}`, {
|
||||
headers: {
|
||||
'x-app-user': `payload_${req.payload.user}`
|
||||
}
|
||||
}).then(response => response.json())
|
||||
|
||||
return doc
|
||||
},
|
||||
|
||||
// Update one document in a custom db
|
||||
updateOne: async ({ collection, data, req }) => {
|
||||
const doc = await fetch(`https://example.com/api/${collection}/update/${data.id}`, {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(data),
|
||||
headers: {
|
||||
'x-app-user': `payload_${req.payload.user}`,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
}).then(response => response.json())
|
||||
|
||||
return { ...doc, updated: true }
|
||||
},
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
title: Hooks Overview
|
||||
label: Overview
|
||||
order: 10
|
||||
desc: Hooks allow you to add your own logic to Payload, including integrating with third-party APIs, adding auto-generated data, or modifing Payload's base functionality.
|
||||
desc: Hooks allow you to add your own logic to Payload, including integrating with third-party APIs, adding auto-generated data, or modifying Payload's base functionality.
|
||||
keywords: hooks, overview, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, express
|
||||
---
|
||||
|
||||
<Banner type="info">
|
||||
Hooks are powerful ways to tie into existing Payload actions in order to add your own logic like
|
||||
integrating with third-party APIs, adding auto-generated data, or modifing Payload's base
|
||||
integrating with third-party APIs, adding auto-generated data, or modifying Payload's base
|
||||
functionality.
|
||||
</Banner>
|
||||
|
||||
|
||||
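To make the banner above concrete, a collection-level `beforeChange` hook that adds auto-generated data might look roughly like the sketch below; the collection slug and field are placeholders, not part of the docs being changed here.

```ts
import type { CollectionConfig } from 'payload/types'

export const Orders: CollectionConfig = {
  slug: 'orders',
  fields: [{ name: 'lastTouchedBy', type: 'text' }],
  hooks: {
    beforeChange: [
      ({ data, req }) => {
        // Add auto-generated data before the document is saved
        return {
          ...data,
          lastTouchedBy: req.user?.email,
        }
      },
    ],
  },
}
```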
@@ -144,6 +144,10 @@ export default addLastModified
|
||||
|
||||
### Available Plugins
|
||||
|
||||
You can discover existing plugins by browsing the `payload-plugin` topic on [GitHub](https://github.com/topics/payload-plugin).
|
||||
Payload supports both official plugins, maintained by the Payload team, and community plugins, developed by external contributors.
|
||||
|
||||
You can discover existing plugins by browsing the `payload-plugin` topic on [GitHub](https://github.com/topics/payload-plugin). These plugins offer a wide range of functionality. Some are maintained by the Payload team, while others are community-built. While we encourage users to explore them, please note that only official plugins are maintained and supported by the Payload team. For community plugins, support may vary as they are developed and maintained independently.
|
||||
|
||||
For maintainers building plugins for others to use, please add the `payload-plugin` topic to your repository so that others can find it. If you would like one to be built by the core Payload team, [open a Feature Request](https://github.com/payloadcms/payload/discussions) in our GitHub Discussions board. We would be happy to review your code and, where appropriate, feature you and your plugin.
|
||||
|
||||
For a list of official plugins, check the [Payload monorepo](https://github.com/payloadcms/payload/tree/main/packages).
|
||||
|
||||
@@ -76,7 +76,7 @@ The following custom endpoints are automatically opened for you:
|
||||
| Endpoint | Method | Description |
|
||||
| --- | --- | --- |
|
||||
| `/api/stripe/rest` | `POST` | Proxies the [Stripe REST API](https://stripe.com/docs/api) behind [Payload access control](https://payloadcms.com/docs/access-control/overview) and returns the result. See the [REST Proxy](#stripe-rest-proxy) section for more details. |
|
||||
| `/api/stripe/webhooks` | `POST` | Handles all Stripe webhook events |
|
||||
| `/stripe/webhooks` | `POST` | Handles all Stripe webhook events |
|
||||
|
||||
##### Stripe REST Proxy
|
||||
|
||||
@@ -114,13 +114,13 @@ const res = await fetch(`/api/stripe/rest`, {
|
||||
Development:
|
||||
|
||||
1. Log in using the Stripe CLI: `stripe login`
|
||||
1. Forward events to localhost `stripe listen --forward-to localhost:3000/api/stripe/webhooks`
|
||||
1. Forward events to localhost `stripe listen --forward-to localhost:3000/stripe/webhooks`
|
||||
1. Paste the given secret into your `.env` file as `STRIPE_WEBHOOKS_ENDPOINT_SECRET`
|
||||
|
||||
Production:
|
||||
|
||||
1. Log in and [create a new webhook](https://dashboard.stripe.com/test/webhooks/create) from the Stripe dashboard
|
||||
1. Paste `YOUR_DOMAIN_NAME/api/stripe/webhooks` as the "Webhook Endpoint URL"
|
||||
1. Paste `YOUR_DOMAIN_NAME/stripe/webhooks` as the "Webhook Endpoint URL"
|
||||
1. Select which events to broadcast
|
||||
1. Paste the given secret into your `.env` file as `STRIPE_WEBHOOKS_ENDPOINT_SECRET`
|
||||
1. Then, handle these events using the `webhooks` portion of this plugin's config:
|
||||
|
||||
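For reference, a handler registered through the plugin's `webhooks` option might look roughly like the sketch below; the event name is illustrative, and the option names (`stripeSecretKey`, `stripeWebhooksEndpointSecret`) follow the plugin's usual setup and should be checked against its types.

```ts
import stripePlugin from '@payloadcms/plugin-stripe'

// Pass the result into the `plugins` array of your Payload config
export const stripe = stripePlugin({
  stripeSecretKey: process.env.STRIPE_SECRET_KEY || '',
  stripeWebhooksEndpointSecret: process.env.STRIPE_WEBHOOKS_ENDPOINT_SECRET,
  webhooks: {
    // Keyed by Stripe event name; runs after the plugin verifies the signature
    'customer.subscription.updated': ({ event, payload }) => {
      payload.logger.info(`Received Stripe event: ${event.id}`)
    },
  },
})
```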
@@ -168,6 +168,22 @@ When an uploaded image is smaller than the defined image size, we have 3 options
|
||||
Use the `withoutEnlargement` prop to change this.
|
||||
</Banner>
|
||||
|
||||
#### Custom file name per size
|
||||
|
||||
Each image size supports a `generateImageName` function that can be used to generate a custom file name for the resized image.
|
||||
This function receives the original file name, the size name, the extension, the height, and the width as arguments.
|
||||
|
||||
```ts
|
||||
{
|
||||
name: 'thumbnail',
|
||||
width: 400,
|
||||
height: 300,
|
||||
generateImageName: ({ height, sizeName, extension, width }) => {
|
||||
return `custom-${sizeName}-${height}-${width}.${extension}`
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Crop and Focal Point Selector
|
||||
|
||||
This feature is only available for image file types.
|
||||
|
||||
@@ -120,8 +120,9 @@
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14",
|
||||
"pnpm": ">=8"
|
||||
"pnpm": ">=9.7.0"
|
||||
},
|
||||
"packageManager": "pnpm@9.7.0",
|
||||
"lint-staged": {
|
||||
"*.{js,jsx,ts,tsx}": [
|
||||
"prettier --write"
|
||||
|
||||
@@ -33,15 +33,15 @@
|
||||
"md5": "2.3.0",
|
||||
"mini-css-extract-plugin": "1.6.2",
|
||||
"path-browserify": "1.0.1",
|
||||
"postcss": "8.4.31",
|
||||
"postcss": "8.4.47",
|
||||
"postcss-loader": "6.2.1",
|
||||
"postcss-preset-env": "9.0.0",
|
||||
"process": "0.11.10",
|
||||
"sass-loader": "12.6.0",
|
||||
"style-loader": "^2.0.0",
|
||||
"swc-loader": "^0.2.3",
|
||||
"swc-minify-webpack-plugin": "^2.1.0",
|
||||
"terser-webpack-plugin": "^5.3.6",
|
||||
"swc-loader": "^0.2.6",
|
||||
"swc-minify-webpack-plugin": "^2.1.3",
|
||||
"terser-webpack-plugin": "^5.3.10",
|
||||
"url-loader": "4.1.1",
|
||||
"webpack": "^5.78.0",
|
||||
"webpack-bundle-analyzer": "^4.8.0",
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import slugify from '@sindresorhus/slugify'
|
||||
import arg from 'arg'
|
||||
import commandExists from 'command-exists'
|
||||
|
||||
import type { CliArgs, PackageManager } from './types'
|
||||
|
||||
@@ -68,7 +67,7 @@ export class Main {
|
||||
const template = await parseTemplate(this.args, validTemplates)
|
||||
|
||||
const projectDir = projectName === '.' ? process.cwd() : `./${slugify(projectName)}`
|
||||
const packageManager = await getPackageManager(this.args)
|
||||
const packageManager = getPackageManager(this.args)
|
||||
|
||||
if (template.type !== 'plugin') {
|
||||
const dbDetails = await selectDb(this.args, projectName)
|
||||
@@ -109,7 +108,7 @@ export class Main {
|
||||
}
|
||||
}
|
||||
|
||||
async function getPackageManager(args: CliArgs): Promise<PackageManager> {
|
||||
function getPackageManager(args: CliArgs): PackageManager {
|
||||
let packageManager: PackageManager = 'npm'
|
||||
|
||||
if (args['--use-npm']) {
|
||||
@@ -119,15 +118,22 @@ async function getPackageManager(args: CliArgs): Promise<PackageManager> {
|
||||
} else if (args['--use-pnpm']) {
|
||||
packageManager = 'pnpm'
|
||||
} else {
|
||||
try {
|
||||
if (await commandExists('yarn')) {
|
||||
packageManager = 'yarn'
|
||||
} else if (await commandExists('pnpm')) {
|
||||
packageManager = 'pnpm'
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
packageManager = 'npm'
|
||||
}
|
||||
packageManager = getEnvironmentPackageManager()
|
||||
}
|
||||
|
||||
return packageManager
|
||||
}
|
||||
|
||||
function getEnvironmentPackageManager(): PackageManager {
|
||||
const userAgent = process.env.npm_config_user_agent || ''
|
||||
|
||||
if (userAgent.startsWith('yarn')) {
|
||||
return 'yarn'
|
||||
}
|
||||
|
||||
if (userAgent.startsWith('pnpm')) {
|
||||
return 'pnpm'
|
||||
}
|
||||
|
||||
return 'npm'
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-mongodb",
|
||||
"version": "1.7.1",
|
||||
"version": "1.7.3",
|
||||
"description": "The officially supported MongoDB database adapter for Payload",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -55,6 +55,30 @@ const handleNonHasManyValues = (formattedValue, operator, path) => {
|
||||
}
|
||||
}
|
||||
|
||||
const buildExistsQuery = (formattedValue, path) => {
|
||||
if (formattedValue) {
|
||||
return {
|
||||
rawQuery: {
|
||||
$and: [
|
||||
{ [path]: { $exists: true } },
|
||||
{ [path]: { $ne: null } },
|
||||
{ [path]: { $ne: '' } }, // Exclude null and empty string
|
||||
],
|
||||
},
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
rawQuery: {
|
||||
$or: [
|
||||
{ [path]: { $exists: false } },
|
||||
{ [path]: { $eq: null } },
|
||||
{ [path]: { $eq: '' } }, // Treat empty string as null / undefined
|
||||
],
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const sanitizeQueryValue = ({
|
||||
field,
|
||||
hasCustomID,
|
||||
@@ -102,8 +126,16 @@ export const sanitizeQueryValue = ({
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'number' && typeof formattedValue === 'string') {
|
||||
formattedValue = Number(val)
|
||||
if (field.type === 'number') {
|
||||
if (typeof formattedValue === 'string' && operator !== 'exists') {
|
||||
formattedValue = Number(val)
|
||||
}
|
||||
|
||||
if (operator === 'exists') {
|
||||
formattedValue = val === 'true' ? true : val === 'false' ? false : Boolean(val)
|
||||
|
||||
return buildExistsQuery(formattedValue, path)
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'date' && typeof val === 'string' && operator !== 'exists') {
|
||||
@@ -189,6 +221,12 @@ export const sanitizeQueryValue = ({
|
||||
$regex: formattedValue.replace(/[\\^$*+?.()|[\]{}]/g, '\\$&'),
|
||||
}
|
||||
}
|
||||
|
||||
if (operator === 'exists') {
|
||||
formattedValue = formattedValue === 'true' || formattedValue === true
|
||||
|
||||
return buildExistsQuery(formattedValue, path)
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "0.8.5",
|
||||
"version": "0.8.7",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -75,6 +75,7 @@ export const buildFindManyArgs = ({
|
||||
depth,
|
||||
fields,
|
||||
path: '',
|
||||
tablePath: '',
|
||||
topLevelArgs: result,
|
||||
topLevelTableName: tableName,
|
||||
})
|
||||
|
||||
@@ -15,6 +15,7 @@ type TraverseFieldArgs = {
|
||||
depth?: number
|
||||
fields: Field[]
|
||||
path: string
|
||||
tablePath: string
|
||||
topLevelArgs: Record<string, unknown>
|
||||
topLevelTableName: string
|
||||
}
|
||||
@@ -27,6 +28,7 @@ export const traverseFields = ({
|
||||
depth,
|
||||
fields,
|
||||
path,
|
||||
tablePath,
|
||||
topLevelArgs,
|
||||
topLevelTableName,
|
||||
}: TraverseFieldArgs) => {
|
||||
@@ -38,6 +40,7 @@ export const traverseFields = ({
|
||||
currentArgs,
|
||||
currentTableName,
|
||||
depth,
|
||||
tablePath,
|
||||
fields: field.fields,
|
||||
path,
|
||||
topLevelArgs,
|
||||
@@ -50,6 +53,7 @@ export const traverseFields = ({
|
||||
if (field.type === 'tabs') {
|
||||
field.tabs.forEach((tab) => {
|
||||
const tabPath = tabHasName(tab) ? `${path}${tab.name}_` : path
|
||||
const tabTablePath = tabHasName(tab) ? `${tablePath}${toSnakeCase(tab.name)}_` : tablePath
|
||||
|
||||
traverseFields({
|
||||
_locales,
|
||||
@@ -59,6 +63,7 @@ export const traverseFields = ({
|
||||
depth,
|
||||
fields: tab.fields,
|
||||
path: tabPath,
|
||||
tablePath: tabTablePath,
|
||||
topLevelArgs,
|
||||
topLevelTableName,
|
||||
})
|
||||
@@ -79,7 +84,7 @@ export const traverseFields = ({
|
||||
}
|
||||
|
||||
const arrayTableName = adapter.tableNameMap.get(
|
||||
`${currentTableName}_${path}${toSnakeCase(field.name)}`,
|
||||
`${currentTableName}_${tablePath}${toSnakeCase(field.name)}`,
|
||||
)
|
||||
|
||||
const arrayTableNameWithLocales = `${arrayTableName}${adapter.localesSuffix}`
|
||||
@@ -95,6 +100,7 @@ export const traverseFields = ({
|
||||
depth,
|
||||
fields: field.fields,
|
||||
path: '',
|
||||
tablePath: '',
|
||||
topLevelArgs,
|
||||
topLevelTableName,
|
||||
})
|
||||
@@ -147,6 +153,7 @@ export const traverseFields = ({
|
||||
currentArgs: withBlock,
|
||||
currentTableName: tableName,
|
||||
depth,
|
||||
tablePath: '',
|
||||
fields: block.fields,
|
||||
path: '',
|
||||
topLevelArgs,
|
||||
@@ -163,6 +170,7 @@ export const traverseFields = ({
|
||||
adapter,
|
||||
currentArgs,
|
||||
currentTableName,
|
||||
tablePath: `${tablePath}${toSnakeCase(field.name)}_`,
|
||||
depth,
|
||||
fields: field.fields,
|
||||
path: `${path}${field.name}_`,
|
||||
|
||||
@@ -110,5 +110,6 @@ async function runMigrationFile(payload: Payload, migration: Migration, batch: n
|
||||
err,
|
||||
msg: parseError(err, `Error running migration ${migration.name}`),
|
||||
})
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,6 +79,7 @@ export async function migrateFresh(
|
||||
err,
|
||||
msg: parseError(err, `Error running migration ${migration.name}. Rolling back`),
|
||||
})
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,6 +98,7 @@ export async function migrateRefresh(this: PostgresAdapter) {
|
||||
err,
|
||||
msg: parseError(err, `Error running migration ${migration.name}. Rolling back.`),
|
||||
})
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -261,10 +261,10 @@ export const getTableColumnFromPath = ({
|
||||
tableType = 'numbers'
|
||||
columnName = 'number'
|
||||
}
|
||||
newTableName = `${tableName}_${tableType}`
|
||||
newTableName = `${rootTableName}_${tableType}`
|
||||
const joinConstraints = [
|
||||
eq(adapter.tables[tableName].id, adapter.tables[newTableName].parent),
|
||||
eq(adapter.tables[newTableName].path, `${constraintPath}${field.name}`),
|
||||
eq(adapter.tables[rootTableName].id, adapter.tables[newTableName].parent),
|
||||
like(adapter.tables[newTableName].path, `${constraintPath}${field.name}`),
|
||||
]
|
||||
|
||||
if (locale && field.localized && adapter.payload.config.localization) {
|
||||
@@ -298,10 +298,12 @@ export const getTableColumnFromPath = ({
|
||||
`${tableName}_${tableNameSuffix}${toSnakeCase(field.name)}`,
|
||||
)
|
||||
|
||||
const arrayParentTable = aliasTable || adapter.tables[tableName]
|
||||
|
||||
constraintPath = `${constraintPath}${field.name}.%.`
|
||||
if (locale && field.localized && adapter.payload.config.localization) {
|
||||
joins[newTableName] = and(
|
||||
eq(adapter.tables[tableName].id, adapter.tables[newTableName]._parentID),
|
||||
eq(arrayParentTable.id, adapter.tables[newTableName]._parentID),
|
||||
eq(adapter.tables[newTableName]._locale, locale),
|
||||
)
|
||||
if (locale !== 'all') {
|
||||
@@ -312,10 +314,7 @@ export const getTableColumnFromPath = ({
|
||||
})
|
||||
}
|
||||
} else {
|
||||
joins[newTableName] = eq(
|
||||
adapter.tables[tableName].id,
|
||||
adapter.tables[newTableName]._parentID,
|
||||
)
|
||||
joins[newTableName] = eq(arrayParentTable.id, adapter.tables[newTableName]._parentID)
|
||||
}
|
||||
return getTableColumnFromPath({
|
||||
adapter,
|
||||
|
||||
@@ -54,7 +54,10 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
arrayRowLocaleData._locale = arrayRowLocale
|
||||
rowsByTable[tableName].locales.push(arrayRowLocaleData)
|
||||
if (!arrayRow.row.id) {
|
||||
arrayRowLocaleData._getParentID = (rows) => rows[i].id
|
||||
arrayRowLocaleData._getParentID = (rows: { _uuid: string; id: number }[]) => {
|
||||
const { id } = rows.find((each) => each._uuid === arrayRow.row._uuid)
|
||||
return id
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
"eslint-plugin-perfectionist": "2.0.0",
|
||||
"eslint-plugin-playwright": "0.16.0",
|
||||
"eslint-plugin-react": "7.33.2",
|
||||
"eslint-plugin-react-hooks": "4.6.0",
|
||||
"eslint-plugin-react-hooks": "4.6.2",
|
||||
"eslint-plugin-regexp": "1.15.0"
|
||||
},
|
||||
"keywords": []
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload",
|
||||
"version": "2.26.0",
|
||||
"version": "2.30.1",
|
||||
"description": "Node, React and MongoDB Headless CMS and Application Framework",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
@@ -55,13 +55,13 @@
|
||||
"@date-io/date-fns": "2.16.0",
|
||||
"@dnd-kit/core": "6.0.8",
|
||||
"@dnd-kit/sortable": "7.0.2",
|
||||
"@faceless-ui/modal": "2.0.1",
|
||||
"@faceless-ui/modal": "2.0.2",
|
||||
"@faceless-ui/scroll-info": "1.3.0",
|
||||
"@faceless-ui/window-info": "2.1.1",
|
||||
"@faceless-ui/window-info": "2.1.2",
|
||||
"@monaco-editor/react": "4.5.1",
|
||||
"@swc/core": "1.6.1",
|
||||
"@swc/register": "0.1.10",
|
||||
"body-parser": "1.20.2",
|
||||
"body-parser": "1.20.3",
|
||||
"body-scroll-lock": "4.0.0-beta.0",
|
||||
"bson-objectid": "2.0.4",
|
||||
"compression": "1.7.4",
|
||||
@@ -70,10 +70,10 @@
|
||||
"console-table-printer": "2.11.2",
|
||||
"dataloader": "2.2.2",
|
||||
"date-fns": "2.30.0",
|
||||
"deep-equal": "2.2.2",
|
||||
"deep-equal": "2.2.3",
|
||||
"deepmerge": "4.3.1",
|
||||
"dotenv": "8.6.0",
|
||||
"express": "4.18.2",
|
||||
"express": "4.21.0",
|
||||
"express-fileupload": "1.4.0",
|
||||
"express-rate-limit": "5.5.1",
|
||||
"file-type": "16.5.4",
|
||||
@@ -97,14 +97,14 @@
|
||||
"isomorphic-fetch": "3.0.0",
|
||||
"joi": "17.9.2",
|
||||
"json-schema-to-typescript": "14.0.5",
|
||||
"jsonwebtoken": "9.0.1",
|
||||
"jsonwebtoken": "9.0.2",
|
||||
"jwt-decode": "3.1.2",
|
||||
"md5": "2.3.0",
|
||||
"method-override": "3.0.0",
|
||||
"minimist": "1.2.8",
|
||||
"mkdirp": "1.0.4",
|
||||
"monaco-editor": "0.38.0",
|
||||
"nodemailer": "6.9.8",
|
||||
"nodemailer": "6.9.15",
|
||||
"object-to-formdata": "4.5.1",
|
||||
"passport": "0.6.0",
|
||||
"passport-anonymous": "1.0.1",
|
||||
@@ -129,14 +129,14 @@
|
||||
"react-router-dom": "5.3.4",
|
||||
"react-router-navigation-prompt": "1.9.6",
|
||||
"react-select": "5.7.4",
|
||||
"react-toastify": "8.2.0",
|
||||
"react-toastify": "10.0.5",
|
||||
"sanitize-filename": "1.6.3",
|
||||
"sass": "1.69.4",
|
||||
"scheduler": "0.23.0",
|
||||
"scheduler": "0.23.2",
|
||||
"scmp": "2.1.0",
|
||||
"sharp": "0.32.6",
|
||||
"swc-loader": "0.2.3",
|
||||
"terser-webpack-plugin": "5.3.9",
|
||||
"swc-loader": "0.2.6",
|
||||
"terser-webpack-plugin": "5.3.10",
|
||||
"ts-essentials": "7.0.3",
|
||||
"use-context-selector": "1.4.1",
|
||||
"uuid": "9.0.1"
|
||||
@@ -145,7 +145,7 @@
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@release-it/conventional-changelog": "7.0.0",
|
||||
"@types/asap": "2.0.0",
|
||||
"@types/body-parser": "1.19.2",
|
||||
"@types/body-parser": "1.19.5",
|
||||
"@types/body-scroll-lock": "^3.1.0",
|
||||
"@types/compression": "1.7.2",
|
||||
"@types/express": "4.17.17",
|
||||
@@ -158,14 +158,14 @@
|
||||
"@types/isomorphic-fetch": "0.0.36",
|
||||
"@types/joi": "14.3.4",
|
||||
"@types/json-schema": "7.0.12",
|
||||
"@types/jsonwebtoken": "8.5.9",
|
||||
"@types/jsonwebtoken": "9.0.7",
|
||||
"@types/method-override": "0.0.32",
|
||||
"@types/mime": "2.0.3",
|
||||
"@types/mini-css-extract-plugin": "^1.4.3",
|
||||
"@types/minimist": "1.2.2",
|
||||
"@types/mkdirp": "1.0.2",
|
||||
"@types/node-fetch": "2.6.4",
|
||||
"@types/nodemailer": "6.4.14",
|
||||
"@types/nodemailer": "6.4.16",
|
||||
"@types/passport": "1.0.12",
|
||||
"@types/passport-anonymous": "1.0.3",
|
||||
"@types/passport-jwt": "3.0.9",
|
||||
@@ -202,9 +202,9 @@
|
||||
"rimraf": "4.4.1",
|
||||
"sass-loader": "12.6.0",
|
||||
"serve-static": "1.15.0",
|
||||
"swc-loader": "^0.2.3",
|
||||
"swc-loader": "^0.2.6",
|
||||
"terser": "5.19.2",
|
||||
"terser-webpack-plugin": "^5.3.6",
|
||||
"terser-webpack-plugin": "^5.3.10",
|
||||
"url-loader": "4.1.1",
|
||||
"vite": "^4.4.9",
|
||||
"webpack": "^5.78.0"
|
||||
|
||||
@@ -36,6 +36,7 @@ const useField = <T,>(options: Options): FieldType<T> => {
|
||||
const showError = valid === false && submitted
|
||||
|
||||
const prevValid = useRef(valid)
|
||||
const prevErrorMessage = useRef(field?.errorMessage)
|
||||
const prevValue = useRef(value)
|
||||
|
||||
// Method to return from `useField`, used to
|
||||
@@ -128,8 +129,9 @@ const useField = <T,>(options: Options): FieldType<T> => {
|
||||
|
||||
// Only dispatch if the validation result has changed
|
||||
// This will prevent unnecessary rerenders
|
||||
if (valid !== prevValid.current) {
|
||||
if (valid !== prevValid.current || errorMessage !== prevErrorMessage.current) {
|
||||
prevValid.current = valid
|
||||
prevErrorMessage.current = errorMessage
|
||||
|
||||
if (typeof dispatchField === 'function') {
|
||||
dispatchField({
|
||||
|
||||
@@ -41,7 +41,7 @@ export const registerLocalStrategy = async ({
|
||||
const sanitizedDoc = { ...doc }
|
||||
if (sanitizedDoc.password) delete sanitizedDoc.password
|
||||
|
||||
return payload.db.create({
|
||||
const dbArgs = {
|
||||
collection: collection.slug,
|
||||
data: {
|
||||
...sanitizedDoc,
|
||||
@@ -49,5 +49,10 @@ export const registerLocalStrategy = async ({
|
||||
salt,
|
||||
},
|
||||
req,
|
||||
})
|
||||
}
|
||||
if (collection?.db?.create) {
|
||||
return collection.db.create(dbArgs)
|
||||
} else {
|
||||
return payload.db.create(dbArgs)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,6 +118,7 @@ const collectionSchema = joi.object().keys({
|
||||
joi.boolean(),
|
||||
),
|
||||
custom: joi.object().pattern(joi.string(), joi.any()),
|
||||
db: joi.object(),
|
||||
dbName: joi.alternatives().try(joi.string(), joi.func()),
|
||||
defaultSort: joi.string(),
|
||||
endpoints: endpointsSchema,
|
||||
|
||||
@@ -3,7 +3,7 @@ import type { Response } from 'express'
|
||||
import type { GraphQLInputObjectType, GraphQLNonNull, GraphQLObjectType } from 'graphql'
|
||||
import type { DeepRequired } from 'ts-essentials'
|
||||
|
||||
import type { GeneratedTypes } from '../../'
|
||||
import type { DatabaseAdapter, GeneratedTypes } from '../../'
|
||||
import type {
|
||||
CustomPreviewButtonProps,
|
||||
CustomPublishButtonType,
|
||||
@@ -383,6 +383,14 @@ export type CollectionConfig = {
|
||||
auth?: IncomingAuthType | boolean
|
||||
/** Extension point to add your custom data. */
|
||||
custom?: Record<string, any>
|
||||
|
||||
/**
|
||||
* Add a custom database adapter to this collection.
|
||||
*/
|
||||
db?: Pick<
|
||||
DatabaseAdapter,
|
||||
'create' | 'deleteMany' | 'deleteOne' | 'find' | 'findOne' | 'updateOne'
|
||||
>
|
||||
/**
|
||||
* Used to override the default naming of the database table or collection, using a function or string
|
||||
* @WARNING: If you change this property with existing data, you will need to handle the renaming of the table in your database or by using migrations
|
||||
|
||||
@@ -242,11 +242,16 @@ async function create<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
req,
|
||||
})
|
||||
} else {
|
||||
doc = await payload.db.create({
|
||||
const dbArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
data: resultWithLocales,
|
||||
req,
|
||||
})
|
||||
}
|
||||
if (collectionConfig?.db?.create) {
|
||||
doc = await collectionConfig.db.create(dbArgs)
|
||||
} else {
|
||||
doc = await payload.db.create(dbArgs)
|
||||
}
|
||||
}
|
||||
|
||||
const verificationToken = doc._verificationToken
|
||||
|
||||
@@ -104,12 +104,20 @@ async function deleteOperation<TSlug extends keyof GeneratedTypes['collections']
|
||||
// Retrieve documents
|
||||
// /////////////////////////////////////
|
||||
|
||||
const { docs } = await payload.db.find<GeneratedTypes['collections'][TSlug]>({
|
||||
const dbArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
locale,
|
||||
req,
|
||||
where: fullWhere,
|
||||
})
|
||||
}
|
||||
let docs
|
||||
if (collectionConfig?.db?.find) {
|
||||
const result = await collectionConfig.db.find<GeneratedTypes['collections'][TSlug]>(dbArgs)
|
||||
docs = result.docs
|
||||
} else {
|
||||
const result = await payload.db.find<GeneratedTypes['collections'][TSlug]>(dbArgs)
|
||||
docs = result.docs
|
||||
}
|
||||
|
||||
const errors = []
|
||||
|
||||
@@ -160,7 +168,7 @@ async function deleteOperation<TSlug extends keyof GeneratedTypes['collections']
|
||||
// Delete document
|
||||
// /////////////////////////////////////
|
||||
|
||||
await payload.db.deleteOne({
|
||||
const deleteOneArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
where: {
|
||||
@@ -168,7 +176,12 @@ async function deleteOperation<TSlug extends keyof GeneratedTypes['collections']
|
||||
equals: id,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
if (collectionConfig?.db?.deleteOne) {
|
||||
await collectionConfig.db.deleteOne(deleteOneArgs)
|
||||
} else {
|
||||
await payload.db.deleteOne(deleteOneArgs)
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// afterRead - Fields
|
||||
|
||||
@@ -96,13 +96,19 @@ async function deleteByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
// /////////////////////////////////////
|
||||
// Retrieve document
|
||||
// /////////////////////////////////////
|
||||
|
||||
const docToDelete = await req.payload.db.findOne({
|
||||
let docToDelete: Document
|
||||
const dbArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
locale: req.locale,
|
||||
req,
|
||||
where: combineQueries({ id: { equals: id } }, accessResults),
|
||||
})
|
||||
}
|
||||
|
||||
if (collectionConfig?.db?.findOne) {
|
||||
docToDelete = await collectionConfig.db.findOne(dbArgs)
|
||||
} else {
|
||||
docToDelete = await req.payload.db.findOne(dbArgs)
|
||||
}
|
||||
|
||||
if (!docToDelete && !hasWhereAccess) throw new NotFound(t)
|
||||
if (!docToDelete && hasWhereAccess) throw new Forbidden(t)
|
||||
@@ -132,11 +138,17 @@ async function deleteByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
// Delete document
|
||||
// /////////////////////////////////////
|
||||
|
||||
let result = await req.payload.db.deleteOne({
|
||||
let result
|
||||
const deleteOneArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
where: { id: { equals: id } },
|
||||
})
|
||||
}
|
||||
if (collectionConfig?.db?.deleteOne) {
|
||||
result = await collectionConfig?.db.deleteOne(deleteOneArgs)
|
||||
} else {
|
||||
result = await payload.db.deleteOne(deleteOneArgs)
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Delete Preferences
|
||||
|
||||
@@ -142,7 +142,7 @@ async function find<T extends TypeWithID & Record<string, unknown>>(
|
||||
where,
|
||||
})
|
||||
|
||||
result = await payload.db.find<T>({
|
||||
const dbArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
limit: sanitizedLimit,
|
||||
locale,
|
||||
@@ -151,7 +151,13 @@ async function find<T extends TypeWithID & Record<string, unknown>>(
|
||||
req,
|
||||
sort,
|
||||
where: fullWhere,
|
||||
})
|
||||
}
|
||||
|
||||
if (collectionConfig?.db?.find) {
|
||||
result = await collectionConfig.db.find<T>(dbArgs)
|
||||
} else {
|
||||
result = await payload.db.find<T>(dbArgs)
|
||||
}
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
|
||||
@@ -87,7 +87,12 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
|
||||
|
||||
if (!findOneArgs.where.and[0].id) throw new NotFound(t)
|
||||
|
||||
let result: T = await req.payload.db.findOne(findOneArgs)
|
||||
let result: T
|
||||
if (collectionConfig?.db?.findOne) {
|
||||
result = await collectionConfig.db.findOne(findOneArgs)
|
||||
} else {
|
||||
result = await req.payload.db.findOne(findOneArgs)
|
||||
}
|
||||
|
||||
if (!result) {
|
||||
if (!disableErrors) {
|
||||
|
||||
@@ -85,7 +85,12 @@ async function restoreVersion<T extends TypeWithID = any>(args: Arguments): Prom
|
||||
where: combineQueries({ id: { equals: parentDocID } }, accessResults),
|
||||
}
|
||||
|
||||
const doc = await req.payload.db.findOne(findOneArgs)
|
||||
let doc: T
|
||||
if (collectionConfig?.db?.findOne) {
|
||||
doc = await collectionConfig.db.findOne(findOneArgs)
|
||||
} else {
|
||||
doc = await req.payload.db.findOne(findOneArgs)
|
||||
}
|
||||
|
||||
if (!doc && !hasWherePolicy) throw new NotFound(t)
|
||||
if (!doc && hasWherePolicy) throw new Forbidden(t)
|
||||
@@ -106,12 +111,18 @@ async function restoreVersion<T extends TypeWithID = any>(args: Arguments): Prom
|
||||
// Update
|
||||
// /////////////////////////////////////
|
||||
|
||||
let result = await req.payload.db.updateOne({
|
||||
const restoreVersionArgs = {
|
||||
id: parentDocID,
|
||||
collection: collectionConfig.slug,
|
||||
data: rawVersion.version,
|
||||
req,
|
||||
})
|
||||
}
|
||||
let result
|
||||
if (collectionConfig?.db?.updateOne) {
|
||||
result = await collectionConfig.db.updateOne(restoreVersionArgs)
|
||||
} else {
|
||||
result = await req.payload.db.updateOne(restoreVersionArgs)
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Save `previousDoc` as a version after restoring
|
||||
|
||||
@@ -137,14 +137,21 @@ async function update<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
|
||||
docs = query.docs
|
||||
} else {
|
||||
const query = await payload.db.find({
|
||||
const dbArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
limit: 0,
|
||||
locale,
|
||||
pagination: false,
|
||||
req,
|
||||
where: fullWhere,
|
||||
})
|
||||
}
|
||||
|
||||
let query
|
||||
if (collectionConfig?.db?.find) {
|
||||
query = await collectionConfig.db.find(dbArgs)
|
||||
} else {
|
||||
query = await payload.db.find(dbArgs)
|
||||
}
|
||||
|
||||
docs = query.docs
|
||||
}
|
||||
@@ -282,13 +289,18 @@ async function update<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (!shouldSaveDraft || data._status === 'published') {
|
||||
result = await req.payload.db.updateOne({
|
||||
const dbArgs = {
|
||||
id,
|
||||
collection: collectionConfig.slug,
|
||||
data: result,
|
||||
locale,
|
||||
req,
|
||||
})
|
||||
}
|
||||
if (collectionConfig?.db?.updateOne) {
|
||||
result = await collectionConfig.db.updateOne(dbArgs)
|
||||
} else {
|
||||
result = await req.payload.db.updateOne(dbArgs)
|
||||
}
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
|
||||
@@ -270,13 +270,18 @@ async function updateByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (!shouldSaveDraft || data._status === 'published') {
|
||||
result = await req.payload.db.updateOne({
|
||||
const dbArgs = {
|
||||
id,
|
||||
collection: collectionConfig.slug,
|
||||
data: dataToUpdate,
|
||||
locale,
|
||||
req,
|
||||
})
|
||||
}
|
||||
if (collectionConfig?.db?.updateOne) {
|
||||
result = await collectionConfig.db.updateOne(dbArgs)
|
||||
} else {
|
||||
result = await req.payload.db.updateOne(dbArgs)
|
||||
}
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
|
||||
@@ -39,6 +39,7 @@ export const defaults: Omit<Config, 'db' | 'editor'> = {
|
||||
schemaOutputFile: `${typeof process?.cwd === 'function' ? process.cwd() : ''}/schema.graphql`,
|
||||
},
|
||||
hooks: {},
|
||||
joiValidation: true,
|
||||
localization: false,
|
||||
maxDepth: 10,
|
||||
rateLimit: {
|
||||
|
||||
@@ -128,6 +128,7 @@ export default joi.object({
|
||||
}),
|
||||
i18n: joi.object(),
|
||||
indexSortableFields: joi.boolean(),
|
||||
joiValidation: joi.boolean(),
|
||||
local: joi.boolean(),
|
||||
localization: joi.alternatives().try(
|
||||
joi.object().keys({
|
||||
|
||||
@@ -149,6 +149,11 @@ export type InitOptions = {
|
||||
*/
|
||||
local?: boolean
|
||||
|
||||
/**
|
||||
* A previously instantiated logger instance. Must conform to the PayloadLogger interface which uses Pino
|
||||
* This allows you to bring your own logger instance and let payload use it
|
||||
*/
|
||||
logger?: PayloadLogger
|
||||
loggerDestination?: DestinationStream
|
||||
/**
|
||||
* Specify options for the built-in Pino logger that Payload uses for internal logging.
|
||||
@@ -156,11 +161,6 @@ export type InitOptions = {
|
||||
* See Pino Docs for options: https://getpino.io/#/docs/api?id=options
|
||||
*/
|
||||
loggerOptions?: LoggerOptions
|
||||
/**
|
||||
* A previously instantiated logger instance. Must conform to the PayloadLogger interface which uses Pino
|
||||
* This allows you to bring your own logger instance and let payload use it
|
||||
*/
|
||||
logger?: PayloadLogger
|
||||
|
||||
/**
|
||||
* A function that is called immediately following startup that receives the Payload instance as its only argument.
|
||||
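A minimal sketch of the `logger` init option documented above, passing a pre-built Pino instance into `payload.init`; the surrounding Express setup, port, and environment variable are assumptions, not part of the type definitions in this change.

```ts
import express from 'express'
import payload from 'payload'
import pino from 'pino'

const app = express()

// A previously instantiated Pino logger that Payload will use for its own logging
const logger = pino({ name: 'payload', level: 'info' })

const start = async (): Promise<void> => {
  await payload.init({
    secret: process.env.PAYLOAD_SECRET || '',
    express: app,
    logger,
  })

  app.listen(3000)
}

void start()
```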
@@ -635,6 +635,12 @@ export type Config = {
|
||||
i18n?: i18nInitOptions
|
||||
/** Automatically index all sortable top-level fields in the database to improve sort performance and add database compatibility for Azure Cosmos and similar. */
|
||||
indexSortableFields?: boolean
|
||||
/**
|
||||
* Disable JOI validation
|
||||
*
|
||||
* @default true // enabled by default
|
||||
*/
|
||||
joiValidation?: boolean
|
||||
/**
|
||||
* Translate your content to different languages/locales.
|
||||
*
|
||||
|
||||
@@ -83,6 +83,10 @@ const validateSchema = async (
|
||||
abortEarly: false,
|
||||
})
|
||||
|
||||
if (!config?.joiValidation) {
|
||||
return config
|
||||
}
|
||||
|
||||
const nestedErrors = [
|
||||
...(await validateCollections(config.collections)),
|
||||
...validateGlobals(config.globals),
|
||||
|
||||
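Given the `joiValidation` flag added to the config schema, defaults, and `validateSchema` above, opting out at the root config would presumably look like the sketch below; the database and editor adapters are only placeholders to keep the example self-contained.

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { slateEditor } from '@payloadcms/richtext-slate'
import { buildConfig } from 'payload/config'

export default buildConfig({
  db: mongooseAdapter({ url: process.env.DATABASE_URI || '' }),
  editor: slateEditor({}),
  collections: [],
  // Skip the startup JOI validation of the config
  // (defaults to true, i.e. validation stays enabled unless set to false)
  joiValidation: false,
})
```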
@@ -23,11 +23,13 @@ export {
|
||||
useListDrawer,
|
||||
} from '../../admin/components/elements/ListDrawer'
|
||||
|
||||
export { useNav } from '../../admin/components/elements/Nav/context'
|
||||
|
||||
export { default as NavGroup } from '../../admin/components/elements/NavGroup'
|
||||
export {
|
||||
Description,
|
||||
DescriptionComponent,
|
||||
DescriptionFunction,
|
||||
} from '../../admin/components/forms/FieldDescription/types'
|
||||
|
||||
export { useNav } from '../../admin/components/elements/Nav/context'
|
||||
export { default as NavGroup } from '../../admin/components/elements/NavGroup'
|
||||
export { toast } from 'react-toastify'
|
||||
|
||||
@@ -219,7 +219,7 @@
|
||||
"noFiltersSet": "Nessun filtro impostato",
|
||||
"noLabel": "<No {{label}}>",
|
||||
"noOptions": "Nessuna opzione",
|
||||
"noResults": "Nessun {{label}} trovato. Non esiste ancora nessun {{label}} oppure nessuno corrisponde ai filtri che hai specificato sopra.",
|
||||
"noResults": "Non abbiamo trovato {{label}}. Potrebbero non esserci {{label}}, oppure nessuno corrisponde ai filtri che hai specificato sopra.",
|
||||
"noValue": "Nessun valore",
|
||||
"none": "Nessuno",
|
||||
"notFound": "Non Trovato",
|
||||
|
||||
@@ -431,15 +431,26 @@ export default async function resizeAndTransformImageSizes({
|
||||
|
||||
const mimeInfo = await fromBuffer(bufferData)
|
||||
|
||||
const imageNameWithDimensions = createImageName({
|
||||
extension: mimeInfo?.ext || sanitizedImage.ext,
|
||||
height: extractHeightFromImage({
|
||||
...originalImageMeta,
|
||||
height: bufferInfo.height,
|
||||
}),
|
||||
outputImageName: sanitizedImage.name,
|
||||
width: bufferInfo.width,
|
||||
})
|
||||
const imageNameWithDimensions = imageResizeConfig.generateImageName
|
||||
? imageResizeConfig.generateImageName({
|
||||
extension: mimeInfo?.ext || sanitizedImage.ext,
|
||||
height: extractHeightFromImage({
|
||||
...originalImageMeta,
|
||||
height: bufferInfo.height,
|
||||
}),
|
||||
originalName: sanitizedImage.name,
|
||||
sizeName: imageResizeConfig.name,
|
||||
width: bufferInfo.width,
|
||||
})
|
||||
: createImageName({
|
||||
extension: mimeInfo?.ext || sanitizedImage.ext,
|
||||
height: extractHeightFromImage({
|
||||
...originalImageMeta,
|
||||
height: bufferInfo.height,
|
||||
}),
|
||||
outputImageName: sanitizedImage.name,
|
||||
width: bufferInfo.width,
|
||||
})
|
||||
|
||||
const imagePath = `${staticPath}/${imageNameWithDimensions}`
|
||||
|
||||
|
||||
@@ -51,12 +51,24 @@ export type ImageUploadFormatOptions = {
|
||||
*/
|
||||
export type ImageUploadTrimOptions = Parameters<Sharp['trim']>[0]
|
||||
|
||||
export type GenerateImageName = (args: {
|
||||
extension: string
|
||||
height: number
|
||||
originalName: string
|
||||
sizeName: string
|
||||
width: number
|
||||
}) => string
|
||||
|
||||
export type ImageSize = Omit<ResizeOptions, 'withoutEnlargement'> & {
|
||||
/**
|
||||
* @deprecated prefer position
|
||||
*/
|
||||
crop?: string // comes from sharp package
|
||||
formatOptions?: ImageUploadFormatOptions
|
||||
/**
|
||||
* Generate a custom name for the file of this image size.
|
||||
*/
|
||||
generateImageName?: GenerateImageName
|
||||
name: string
|
||||
trimOptions?: ImageUploadTrimOptions
|
||||
/**
|
||||
|
||||
@@ -13,5 +13,9 @@ export const getQueryDraftsSort = (sort: string): string => {
|
||||
orderBy = sort.substring(1)
|
||||
}
|
||||
|
||||
if (orderBy === 'id') {
|
||||
return `${direction}parent`
|
||||
}
|
||||
|
||||
return `${direction}version.${orderBy}`
|
||||
}
|
||||
|
||||
@@ -23,7 +23,9 @@ export const getLatestCollectionVersion = async <T extends TypeWithID = any>({
|
||||
}: Args): Promise<T> => {
|
||||
let latestVersion: TypeWithVersion<T>
|
||||
|
||||
if (config.versions?.drafts) {
|
||||
const hasConfigDb = Object.keys(config?.db ? config?.db : {}).length > 0
|
||||
|
||||
if (config.versions?.drafts && !hasConfigDb) {
|
||||
const { docs } = await payload.db.findVersions<T>({
|
||||
collection: config.slug,
|
||||
limit: 1,
|
||||
@@ -35,7 +37,12 @@ export const getLatestCollectionVersion = async <T extends TypeWithID = any>({
|
||||
;[latestVersion] = docs
|
||||
}
|
||||
|
||||
const doc = await payload.db.findOne<T>({ ...query, req })
|
||||
let doc
|
||||
if (config?.db?.findOne) {
|
||||
doc = await config.db.findOne<T>({ ...query, req })
|
||||
} else {
|
||||
doc = await payload.db.findOne<T>({ ...query, req })
|
||||
}
|
||||
|
||||
if (!latestVersion || (docHasTimestamps(doc) && latestVersion.updatedAt < doc.updatedAt)) {
|
||||
return doc
|
||||
|
||||
@@ -96,11 +96,19 @@ From there, create the adapter, passing in all of its required properties:
|
||||
```js
|
||||
import { StorageSharedKeyCredential } from '@azure/storage-blob'
import { azureBlobStorageAdapter } from '@payloadcms/plugin-cloud-storage/azure'
|
||||
|
||||
// if you need to obtain credentials you may do so by following the instructions here: https://docs.microsoft.com/en-us/azure/storage/common/storage-auth-aad-app?tabs=javascript
|
||||
// or you can use the connection string directly.
|
||||
|
||||
const adapter = azureBlobStorageAdapter({
|
||||
connectionString: process.env.AZURE_STORAGE_CONNECTION_STRING,
|
||||
containerName: process.env.AZURE_STORAGE_CONTAINER_NAME,
|
||||
allowContainerCreate: process.env.AZURE_STORAGE_ALLOW_CONTAINER_CREATE === 'true',
|
||||
baseURL: process.env.AZURE_STORAGE_ACCOUNT_BASEURL,
|
||||
/**
|
||||
* Optional: You may wish to obtain credentials that cannot be passed through in the connectionString connection option. In that case the connectionString will only be the URL to the storage account.
|
||||
* Can be one of AnonymousCredential | StorageSharedKeyCredential | TokenCredential
|
||||
**/
|
||||
credential: new StorageSharedKeyCredential(process.env.AZURE_STORAGE_ACCOUNT_NAME, process.env.AZURE_STORAGE_ACCOUNT_KEY),
|
||||
})
|
||||
|
||||
// Now you can pass this adapter to the plugin
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@payloadcms/plugin-cloud-storage",
|
||||
"description": "The official cloud storage plugin for Payload CMS",
|
||||
"version": "1.1.3",
|
||||
"version": "1.2.0",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
@@ -53,6 +53,7 @@
|
||||
"@aws-sdk/client-s3": "^3.142.0",
|
||||
"@aws-sdk/lib-storage": "^3.267.0",
|
||||
"@azure/storage-blob": "^12.11.0",
|
||||
"@azure/core-http": "^3.0.0",
|
||||
"@google-cloud/storage": "^6.4.1",
|
||||
"@types/express": "^4.17.9",
|
||||
"@types/find-node-modules": "^2.1.2",
|
||||
|
||||
@@ -1,4 +1,9 @@
|
||||
import type { ContainerClient } from '@azure/storage-blob'
|
||||
import type { TokenCredential } from '@azure/core-http'
|
||||
import type {
|
||||
AnonymousCredential,
|
||||
ContainerClient,
|
||||
StorageSharedKeyCredential,
|
||||
} from '@azure/storage-blob'
|
||||
|
||||
import { BlobServiceClient } from '@azure/storage-blob'
|
||||
|
||||
@@ -15,6 +20,7 @@ export interface Args {
|
||||
baseURL: string
|
||||
connectionString: string
|
||||
containerName: string
|
||||
credential?: AnonymousCredential | StorageSharedKeyCredential | TokenCredential
|
||||
}
|
||||
|
||||
export const azureBlobStorageAdapter = ({
|
||||
@@ -22,11 +28,14 @@ export const azureBlobStorageAdapter = ({
|
||||
baseURL,
|
||||
connectionString,
|
||||
containerName,
|
||||
credential,
|
||||
}: Args): Adapter => {
|
||||
let storageClient: ContainerClient | null = null
|
||||
const getStorageClient = () => {
|
||||
if (storageClient) return storageClient
|
||||
const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString)
|
||||
const blobServiceClient = credential
|
||||
? new BlobServiceClient(connectionString, credential)
|
||||
: BlobServiceClient.fromConnectionString(connectionString)
|
||||
return (storageClient = blobServiceClient.getContainerClient(containerName))
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@payloadcms/plugin-cloud",
|
||||
"description": "The official Payload Cloud plugin",
|
||||
"version": "3.0.1",
|
||||
"version": "3.0.2",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"license": "MIT",
|
||||
@@ -28,12 +28,12 @@
|
||||
"@aws-sdk/credential-providers": "^3.289.0",
|
||||
"@aws-sdk/lib-storage": "^3.267.0",
|
||||
"amazon-cognito-identity-js": "^6.1.2",
|
||||
"nodemailer": "6.9.9"
|
||||
"nodemailer": "6.9.15"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^4.17.9",
|
||||
"@types/jest": "^29.5.1",
|
||||
"@types/nodemailer": "6.4.14",
|
||||
"@types/nodemailer": "6.4.16",
|
||||
"payload": "workspace:*",
|
||||
"ts-jest": "^29.1.0",
|
||||
"webpack": "^5.78.0"
|
||||
|
||||
@@ -29,18 +29,27 @@ export const getStaticHandler = ({ cachingOptions, collection }: Args): StaticHa
|
||||
collCacheConfig?.enabled !== false
|
||||
|
||||
return async (req, res, next) => {
|
||||
const filename = req.params.filename
|
||||
let fileKeyWithPrefix = ''
|
||||
|
||||
if (!filename) {
|
||||
req.payload.logger.warn({
|
||||
msg: `No filename provided for static file against collection: ${collection.slug}`,
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
const { identityID, storageClient } = await getStorageClient()
|
||||
|
||||
const Key = createKey({
|
||||
fileKeyWithPrefix = createKey({
|
||||
collection: collection.slug,
|
||||
filename: req.params.filename,
|
||||
filename,
|
||||
identityID,
|
||||
})
|
||||
|
||||
const object = await storageClient.getObject({
|
||||
Bucket: process.env.PAYLOAD_CLOUD_BUCKET,
|
||||
Key,
|
||||
Key: fileKeyWithPrefix,
|
||||
})
|
||||
|
||||
res.set({
|
||||
@@ -56,7 +65,10 @@ export const getStaticHandler = ({ cachingOptions, collection }: Args): StaticHa
|
||||
|
||||
return next()
|
||||
} catch (err: unknown) {
|
||||
req.payload.logger.error({ err, msg: 'Error getting file from cloud storage' })
|
||||
req.payload.logger.error({
|
||||
err,
|
||||
msg: `Error getting file from cloud storage: '${fileKeyWithPrefix}'`,
|
||||
})
|
||||
return next()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/richtext-lexical",
|
||||
"version": "0.11.2",
|
||||
"version": "0.11.3",
|
||||
"description": "The officially supported Lexical richtext adapter for Payload",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -22,7 +22,7 @@
|
||||
"prepublishOnly": "pnpm clean && pnpm build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@faceless-ui/modal": "2.0.1",
|
||||
"@faceless-ui/modal": "2.0.2",
|
||||
"@lexical/headless": "0.13.1",
|
||||
"@lexical/link": "0.13.1",
|
||||
"@lexical/list": "0.13.1",
|
||||
@@ -39,7 +39,7 @@
|
||||
"json-schema": "^0.4.0",
|
||||
"lexical": "0.13.1",
|
||||
"lodash": "4.17.21",
|
||||
"react-error-boundary": "4.0.12",
|
||||
"react-error-boundary": "4.0.13",
|
||||
"react-i18next": "11.18.6",
|
||||
"ts-essentials": "7.0.3"
|
||||
},
|
||||
|
||||
@@ -118,13 +118,14 @@ export const LinkFeature = (props: LinkFeatureProps): FeatureProvider => {
|
||||
})
|
||||
|
||||
const rel: string = node.fields.newTab ? ' rel="noopener noreferrer"' : ''
|
||||
const target: string = node.fields.newTab ? ' target="_blank"' : ''
|
||||
|
||||
const href: string =
|
||||
node.fields.linkType === 'custom'
|
||||
? node.fields.url
|
||||
: (node.fields.doc?.value as string)
|
||||
|
||||
return `<a href="${href}"${rel}>${childrenText}</a>`
|
||||
return `<a href="${href}"${target}${rel}>${childrenText}</a>`
|
||||
},
|
||||
nodeTypes: [LinkNode.getType()],
|
||||
} as HTMLConverter<SerializedLinkNode>,
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"prepublishOnly": "pnpm clean && pnpm build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@faceless-ui/modal": "2.0.1",
|
||||
"@faceless-ui/modal": "2.0.2",
|
||||
"i18next": "22.5.1",
|
||||
"is-hotkey": "0.2.0",
|
||||
"react-i18next": "11.18.6",
|
||||
|
||||
pnpm-lock.yaml (generated, 23285 lines): diff suppressed because it is too large.
@@ -5,7 +5,7 @@
|
||||
import React from 'react'
|
||||
import Link from 'next/link'
|
||||
|
||||
import { Header } from '../../../payload/payload-types'
|
||||
import type { Header } from '../../../payload/payload-types'
|
||||
import { fetchHeader } from '../../_api/fetchGlobals'
|
||||
import { Gutter } from '../Gutter'
|
||||
import { HeaderNav } from './Nav'
|
||||
|
||||
@@ -177,7 +177,7 @@ If you are migrating an existing site or moving content to a new URL, you can us
|
||||
|
||||
## Website
|
||||
|
||||
This template includes a beautifully designed, production-ready front-end built with the [Next.js App Router](https://nextjs.org), served right alongside your Payload app in a single Express server. This makes is so that you can deploy both apps simultaneously and host them together. If you prefer a different front-end framework, this pattern works for any framework that supports a custom server. If you prefer to host your website separately from Payload, you can easily [Eject](#eject) the front-end out from this template to swap in your own, or to use it as a standalone CMS. For more details, see the official [Custom Server Example](https://github.com/payloadcms/payload/tree/main/examples/custom-server).
|
||||
This template includes a beautifully designed, production-ready front-end built with the [Next.js App Router](https://nextjs.org), served right alongside your Payload app in a single Express server. This makes it so that you can deploy both apps simultaneously and host them together. If you prefer a different front-end framework, this pattern works for any framework that supports a custom server. If you prefer to host your website separately from Payload, you can easily [Eject](#eject) the front-end out from this template to swap in your own, or to use it as a standalone CMS. For more details, see the official [Custom Server Example](https://github.com/payloadcms/payload/tree/main/examples/custom-server).
|
||||
|
||||
Core features:
|
||||
|
||||
|
||||
test/collections-db/config.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
import type { CollectionConfig } from '../../packages/payload/types'
|
||||
|
||||
import { buildConfigWithDefaults } from '../buildConfigWithDefaults'
|
||||
import { devUser } from '../credentials'
|
||||
|
||||
export const doc = {
|
||||
id: -1,
|
||||
customData: true,
|
||||
}
|
||||
export const docs = [doc]
|
||||
|
||||
const collectionWithDb = (collectionSlug: string): CollectionConfig => {
|
||||
return {
|
||||
slug: collectionSlug,
|
||||
db: {
|
||||
// @ts-expect-error
|
||||
create: () => {
|
||||
return doc
|
||||
},
|
||||
// @ts-expect-error
|
||||
deleteOne: () => {
|
||||
return docs
|
||||
},
|
||||
// Only used in deleteUserPreferences on user collections
|
||||
// @ts-expect-error
|
||||
deleteMany: () => {
|
||||
return docs
|
||||
},
|
||||
// @ts-expect-error
|
||||
find: () => {
|
||||
return { docs }
|
||||
},
|
||||
// @ts-expect-error
|
||||
findOne: () => {
|
||||
return doc
|
||||
},
|
||||
// @ts-expect-error
|
||||
updateOne: () => {
|
||||
return { ...doc, updated: true }
|
||||
},
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
export const collectionSlug = 'collection-db'
|
||||
export default buildConfigWithDefaults({
|
||||
// @ts-expect-error
|
||||
collections: [collectionWithDb(collectionSlug)],
|
||||
graphQL: {
|
||||
schemaOutputFile: './test/collections-db/schema.graphql',
|
||||
},
|
||||
|
||||
onInit: async (payload) => {
|
||||
await payload.create({
|
||||
collection: 'users',
|
||||
data: {
|
||||
email: devUser.email,
|
||||
password: devUser.password,
|
||||
},
|
||||
})
|
||||
},
|
||||
})
|
||||
test/collections-db/int.spec.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
import payload from '../../packages/payload/src'
|
||||
import { devUser } from '../credentials'
|
||||
import { initPayloadTest } from '../helpers/configHelpers'
|
||||
import { collectionSlug } from './config'
|
||||
import { doc } from './config'
|
||||
|
||||
require('isomorphic-fetch')
|
||||
|
||||
let apiUrl
|
||||
let jwt
|
||||
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
const { email, password } = devUser
|
||||
|
||||
describe('Collection Database Operations', () => {
|
||||
// --__--__--__--__--__--__--__--__--__
|
||||
// Boilerplate test setup/teardown
|
||||
// --__--__--__--__--__--__--__--__--__
|
||||
beforeAll(async () => {
|
||||
const { serverURL } = await initPayloadTest({ __dirname, init: { local: false } })
|
||||
apiUrl = `${serverURL}/api`
|
||||
|
||||
const response = await fetch(`${apiUrl}/users/login`, {
|
||||
body: JSON.stringify({
|
||||
email,
|
||||
password,
|
||||
}),
|
||||
headers,
|
||||
method: 'POST',
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
jwt = data.token
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
if (typeof payload.db.destroy === 'function') {
|
||||
await payload.db.destroy(payload)
|
||||
}
|
||||
})
|
||||
|
||||
// --__--__--__--__--__--__--__--__--__
|
||||
// Local API
|
||||
// --__--__--__--__--__--__--__--__--__
|
||||
|
||||
it('collection DB Create', async () => {
|
||||
const result = await payload.create({
|
||||
collection: collectionSlug,
|
||||
data: {
|
||||
id: doc.id,
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.id).toEqual(doc.id)
|
||||
expect(result.customData).toEqual(doc.customData)
|
||||
})
|
||||
|
||||
it('collection DB Update', async () => {
|
||||
const where = { id: { equals: doc.id } }
|
||||
const result = await payload.update({
|
||||
collection: collectionSlug,
|
||||
where,
|
||||
data: {
|
||||
id: doc.id,
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.docs[0].id).toEqual(doc.id)
|
||||
expect(result.docs[0].customData).toEqual(doc.customData)
|
||||
expect(result.docs[0].updated).toEqual(true)
|
||||
})
|
||||
|
||||
it('collection DB Find', async () => {
|
||||
const where = { id: { equals: doc.id } }
|
||||
const result = await payload.find({
|
||||
collection: collectionSlug,
|
||||
where,
|
||||
})
|
||||
|
||||
expect(result.docs[0].id).toEqual(doc.id)
|
||||
expect(result.docs[0].customData).toEqual(doc.customData)
|
||||
})
|
||||
|
||||
it('collection DB Find One', async () => {
|
||||
const result = await payload.findByID({
|
||||
collection: collectionSlug,
|
||||
id: doc.id,
|
||||
})
|
||||
|
||||
expect(result.id).toEqual(doc.id)
|
||||
expect(result.customData).toEqual(doc.customData)
|
||||
})
|
||||
|
||||
it('collection DB Delete', async () => {
|
||||
const where = { id: { equals: doc.id } }
|
||||
|
||||
const result = await payload.delete({
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
user: devUser,
|
||||
where,
|
||||
})
|
||||
|
||||
expect(result.docs[0].id).toEqual(doc.id)
|
||||
expect(result.docs[0].customData).toEqual(doc.customData)
|
||||
expect(result.errors).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
@@ -170,6 +170,23 @@ const ArrayFields: CollectionConfig = {
|
||||
],
|
||||
type: 'array',
|
||||
},
|
||||
{
|
||||
name: 'nestedArrayLocalized',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
type: 'array',
|
||||
name: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'text',
|
||||
type: 'text',
|
||||
localized: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
slug: arrayFieldsSlug,
|
||||
versions: true,
|
||||
|
||||
@@ -129,7 +129,6 @@ const GroupFields: CollectionConfig = {
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
type: 'tabs',
|
||||
tabs: [
|
||||
@@ -184,6 +183,50 @@ const GroupFields: CollectionConfig = {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'camelCaseGroup',
|
||||
type: 'group',
|
||||
fields: [
|
||||
{
|
||||
name: 'array',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
type: 'text',
|
||||
name: 'text',
|
||||
localized: true,
|
||||
},
|
||||
{
|
||||
type: 'array',
|
||||
name: 'array',
|
||||
fields: [
|
||||
{
|
||||
type: 'text',
|
||||
name: 'text',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'nesGroup',
|
||||
type: 'group',
|
||||
fields: [
|
||||
{
|
||||
type: 'array',
|
||||
name: 'arr',
|
||||
fields: [
|
||||
{
|
||||
type: 'text',
|
||||
name: 'text',
|
||||
localized: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@@ -79,6 +79,33 @@ const NumberFields: CollectionConfig = {
|
||||
hasMany: true,
|
||||
minRows: 2,
|
||||
},
|
||||
{
|
||||
name: 'array',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'numbers',
|
||||
type: 'number',
|
||||
hasMany: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'blocks',
|
||||
type: 'blocks',
|
||||
blocks: [
|
||||
{
|
||||
slug: 'block',
|
||||
fields: [
|
||||
{
|
||||
name: 'numbers',
|
||||
type: 'number',
|
||||
hasMany: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@@ -247,6 +247,32 @@ const TabsFields: CollectionConfig = {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'camelCaseTab',
|
||||
fields: [
|
||||
{
|
||||
name: 'array',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
type: 'text',
|
||||
name: 'text',
|
||||
localized: true,
|
||||
},
|
||||
{
|
||||
type: 'array',
|
||||
name: 'array',
|
||||
fields: [
|
||||
{
|
||||
type: 'text',
|
||||
name: 'text',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
|
||||
@@ -166,6 +166,33 @@ const TextFields: CollectionConfig = {
        disableListFilter: true,
      },
    },
    {
      name: 'array',
      type: 'array',
      fields: [
        {
          name: 'texts',
          type: 'text',
          hasMany: true,
        },
      ],
    },
    {
      name: 'blocks',
      type: 'blocks',
      blocks: [
        {
          slug: 'block',
          fields: [
            {
              name: 'texts',
              type: 'text',
              hasMany: true,
            },
          ],
        },
      ],
    },
  ],
  slug: textFieldsSlug,
}
@@ -399,6 +399,180 @@ describe('Fields', () => {
      expect(hitResult).toBeDefined()
      expect(missResult).toBeFalsy()
    })

    it('should query hasMany within an array', async () => {
      const docFirst = await payload.create({
        collection: 'text-fields',
        data: {
          text: 'required',
          array: [
            {
              texts: ['text_1', 'text_2'],
            },
          ],
        },
      })

      const docSecond = await payload.create({
        collection: 'text-fields',
        data: {
          text: 'required',
          array: [
            {
              texts: ['text_other', 'text_2'],
            },
          ],
        },
      })

      const resEqualsFull = await payload.find({
        collection: 'text-fields',
        where: {
          'array.texts': {
            equals: 'text_2',
          },
        },
        sort: '-createdAt',
      })

      expect(resEqualsFull.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFull.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resEqualsFull.totalDocs).toBe(2)

      const resEqualsFirst = await payload.find({
        collection: 'text-fields',
        where: {
          'array.texts': {
            equals: 'text_1',
          },
        },
        sort: '-createdAt',
      })

      expect(resEqualsFirst.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFirst.docs.find((res) => res.id === docSecond.id)).toBeUndefined()

      expect(resEqualsFirst.totalDocs).toBe(1)

      const resContainsSecond = await payload.find({
        collection: 'text-fields',
        where: {
          'array.texts': {
            contains: 'text_other',
          },
        },
        sort: '-createdAt',
      })

      expect(resContainsSecond.docs.find((res) => res.id === docFirst.id)).toBeUndefined()
      expect(resContainsSecond.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resContainsSecond.totalDocs).toBe(1)

      const resInSecond = await payload.find({
        collection: 'text-fields',
        where: {
          'array.texts': {
            in: ['text_other'],
          },
        },
        sort: '-createdAt',
      })

      expect(resInSecond.docs.find((res) => res.id === docFirst.id)).toBeUndefined()
      expect(resInSecond.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resInSecond.totalDocs).toBe(1)
    })

    it('should query hasMany within blocks', async () => {
      const docFirst = await payload.create({
        collection: 'text-fields',
        data: {
          text: 'required',
          blocks: [
            {
              blockType: 'block',
              texts: ['text_1', 'text_2'],
            },
          ],
        },
      })

      const docSecond = await payload.create({
        collection: 'text-fields',
        data: {
          text: 'required',
          blocks: [
            {
              blockType: 'block',
              texts: ['text_other', 'text_2'],
            },
          ],
        },
      })

      const resEqualsFull = await payload.find({
        collection: 'text-fields',
        where: {
          'blocks.texts': {
            equals: 'text_2',
          },
        },
        sort: '-createdAt',
      })

      expect(resEqualsFull.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFull.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resEqualsFull.totalDocs).toBe(2)

      const resEqualsFirst = await payload.find({
        collection: 'text-fields',
        where: {
          'blocks.texts': {
            equals: 'text_1',
          },
        },
        sort: '-createdAt',
      })

      expect(resEqualsFirst.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFirst.docs.find((res) => res.id === docSecond.id)).toBeUndefined()

      expect(resEqualsFirst.totalDocs).toBe(1)

      const resContainsSecond = await payload.find({
        collection: 'text-fields',
        where: {
          'blocks.texts': {
            contains: 'text_other',
          },
        },
        sort: '-createdAt',
      })

      expect(resContainsSecond.docs.find((res) => res.id === docFirst.id)).toBeUndefined()
      expect(resContainsSecond.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resContainsSecond.totalDocs).toBe(1)

      const resInSecond = await payload.find({
        collection: 'text-fields',
        where: {
          'blocks.texts': {
            in: ['text_other'],
          },
        },
        sort: '-createdAt',
      })

      expect(resInSecond.docs.find((res) => res.id === docFirst.id)).toBeUndefined()
      expect(resInSecond.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resInSecond.totalDocs).toBe(1)
    })
  })

  describe('number', () => {
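Editor's note: the test hunk above exercises querying `hasMany` text values that live inside array rows and blocks. Below is a minimal sketch of the same pattern through the Local API, assuming a collection shaped like the `text-fields` diff above (slug, field names, and operators are taken from that diff; nothing else is implied).

```ts
// Sketch only — assumes a `text-fields` collection whose `array` rows and
// `block`-type blocks each carry a `texts` field with `hasMany: true`,
// as added in the config diff above. `payload` is an initialized Local API instance.
const inArrays = await payload.find({
  collection: 'text-fields',
  where: {
    // Dot notation drills into array rows; `equals` matches any element of the hasMany value.
    'array.texts': { equals: 'text_2' },
  },
})

const inBlocks = await payload.find({
  collection: 'text-fields',
  where: {
    // The same path syntax works through blocks; `in` and `contains` behave as in the tests above.
    'blocks.texts': { in: ['text_other'] },
  },
})
```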
@@ -534,6 +708,171 @@ describe('Fields', () => {
      expect(hitResult).toBeDefined()
      expect(missResult).toBeFalsy()
    })

    it('should query hasMany within an array', async () => {
      const docFirst = await payload.create({
        collection: 'number-fields',
        data: {
          array: [
            {
              numbers: [10, 30],
            },
          ],
        },
      })

      const docSecond = await payload.create({
        collection: 'number-fields',
        data: {
          array: [
            {
              numbers: [10, 40],
            },
          ],
        },
      })

      const resEqualsFull = await payload.find({
        collection: 'number-fields',
        where: {
          'array.numbers': {
            equals: 10,
          },
        },
      })

      expect(resEqualsFull.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFull.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resEqualsFull.totalDocs).toBe(2)

      const resEqualsFirst = await payload.find({
        collection: 'number-fields',
        where: {
          'array.numbers': {
            equals: 30,
          },
        },
      })

      expect(resEqualsFirst.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFirst.docs.find((res) => res.id === docSecond.id)).toBeUndefined()

      expect(resEqualsFirst.totalDocs).toBe(1)

      const resInSecond = await payload.find({
        collection: 'number-fields',
        where: {
          'array.numbers': {
            in: [40],
          },
        },
      })

      expect(resInSecond.docs.find((res) => res.id === docFirst.id)).toBeUndefined()
      expect(resInSecond.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resInSecond.totalDocs).toBe(1)
    })

    it('should query hasMany within blocks', async () => {
      const docFirst = await payload.create({
        collection: 'number-fields',
        data: {
          blocks: [
            {
              blockType: 'block',
              numbers: [10, 30],
            },
          ],
        },
      })

      const docSecond = await payload.create({
        collection: 'number-fields',
        data: {
          blocks: [
            {
              blockType: 'block',
              numbers: [10, 40],
            },
          ],
        },
      })

      const resEqualsFull = await payload.find({
        collection: 'number-fields',
        where: {
          'blocks.numbers': {
            equals: 10,
          },
        },
      })

      expect(resEqualsFull.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFull.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resEqualsFull.totalDocs).toBe(2)

      const resEqualsFirst = await payload.find({
        collection: 'number-fields',
        where: {
          'blocks.numbers': {
            equals: 30,
          },
        },
      })

      expect(resEqualsFirst.docs.find((res) => res.id === docFirst.id)).toBeDefined()
      expect(resEqualsFirst.docs.find((res) => res.id === docSecond.id)).toBeUndefined()

      expect(resEqualsFirst.totalDocs).toBe(1)

      const resInSecond = await payload.find({
        collection: 'number-fields',
        where: {
          'blocks.numbers': {
            in: [40],
          },
        },
      })

      expect(resInSecond.docs.find((res) => res.id === docFirst.id)).toBeUndefined()
      expect(resInSecond.docs.find((res) => res.id === docSecond.id)).toBeDefined()

      expect(resInSecond.totalDocs).toBe(1)
    })

    it('should properly query numbers with exists operator', async () => {
      await payload.create({
        collection: 'number-fields',
        data: {
          number: null,
        },
      })

      const numbersExist = await payload.find({
        collection: 'number-fields',
        where: {
          number: {
            exists: true,
          },
        },
      })

      expect(numbersExist.totalDocs).toBe(4)

      const numbersNotExists = await payload.find({
        collection: 'number-fields',
        where: {
          number: {
            exists: false,
          },
        },
      })

      expect(numbersNotExists.docs).toHaveLength(1)
    })
  })

  if (isMongoose(payload)) {
@@ -807,6 +1146,61 @@ describe('Fields', () => {
      expect(result.items[0].localizedText.es).toStrictEqual('spanish')
    })

    it('should create and append localized items to nested array with versions', async () => {
      const doc = await payload.create({
        collection,
        data: {
          items: [{ text: 'req' }],
          localized: [{ text: 'req' }],
          nestedArrayLocalized: [
            {
              array: [
                {
                  text: 'marcelo',
                },
              ],
            },
          ],
        },
      })

      const res = await payload.update({
        id: doc.id,
        collection,
        data: {
          nestedArrayLocalized: [
            ...doc.nestedArrayLocalized,
            {
              array: [
                {
                  text: 'alejandro',
                },
                {
                  text: 'raul',
                },
              ],
            },
            {
              array: [
                {
                  text: 'amigo',
                },
              ],
            },
          ],
        },
      })

      expect(res.nestedArrayLocalized).toHaveLength(3)

      expect(res.nestedArrayLocalized[0].array[0].text).toBe('marcelo')

      expect(res.nestedArrayLocalized[1].array[0].text).toBe('alejandro')
      expect(res.nestedArrayLocalized[1].array[1].text).toBe('raul')

      expect(res.nestedArrayLocalized[2].array[0].text).toBe('amigo')
    })

    it('should create with nested array', async () => {
      const subArrayText = 'something expected'
      const doc = await payload.create({
@@ -911,6 +1305,31 @@ describe('Fields', () => {
        },
      })
    })

    it('should insert/read camelCase group with nested arrays + localized', async () => {
      const res = await payload.create({
        collection: 'group-fields',
        data: {
          camelCaseGroup: {
            nesGroup: { arr: [{ text: 'nestedCamel' }] },
            array: [
              {
                text: 'text',
                array: [
                  {
                    text: 'nested',
                  },
                ],
              },
            ],
          },
        },
      })

      expect(res.camelCaseGroup.array[0].text).toBe('text')
      expect(res.camelCaseGroup.array[0].array[0].text).toBe('nested')
      expect(res.camelCaseGroup.nesGroup.arr[0].text).toBe('nestedCamel')
    })
  })

  describe('tabs', () => {
@@ -972,6 +1391,37 @@ describe('Fields', () => {

      expect(doc.potentiallyEmptyGroup).toBeDefined()
    })

    it('should insert/read camelCase tab with nested arrays + localized', async () => {
      const res = await payload.create({
        collection: tabsFieldsSlug,
        data: {
          anotherText: 'req',
          array: [{ text: 'req' }],
          blocks: [{ blockType: 'content', text: 'req' }],
          group: { number: 1 },
          numberInRow: 1,
          textInRow: 'req',
          tab: { array: [{ text: 'req' }] },

          camelCaseTab: {
            array: [
              {
                text: 'text',
                array: [
                  {
                    text: 'nested',
                  },
                ],
              },
            ],
          },
        },
      })

      expect(res.camelCaseTab.array[0].text).toBe('text')
      expect(res.camelCaseTab.array[0].array[0].text).toBe('nested')
    })
  })

  describe('blocks', () => {
@@ -641,6 +641,20 @@ export interface TextField {
  withMaxRows?: string[] | null
  disableListColumnText?: string | null
  disableListFilterText?: string | null
  array?:
    | {
        texts?: string[] | null
        id?: string | null
      }[]
    | null
  blocks?:
    | {
        texts?: string[] | null
        id?: string | null
        blockName?: string | null
        blockType: 'block'
      }[]
    | null
  updatedAt: string
  createdAt: string
}

@@ -805,6 +819,28 @@ export interface GroupField {
      }
    }
  }
  camelCaseGroup?: {
    array?:
      | {
          text?: string | null
          array?:
            | {
                text?: string | null
                id?: string | null
              }[]
            | null
          id?: string | null
        }[]
      | null
    nesGroup?: {
      arr?:
        | {
            text?: string | null
            id?: string | null
          }[]
        | null
    }
  }
  updatedAt: string
  createdAt: string
}

@@ -898,6 +934,20 @@ export interface NumberField {
  validatesHasMany?: number[] | null
  localizedHasMany?: number[] | null
  withMinRows?: number[] | null
  array?:
    | {
        numbers?: number[] | null
        id?: string | null
      }[]
    | null
  blocks?:
    | {
        numbers?: number[] | null
        id?: string | null
        blockName?: string | null
        blockType: 'block'
      }[]
    | null
  updatedAt: string
  createdAt: string
}

@@ -1176,6 +1226,20 @@ export interface TabsField {
    afterChange?: boolean | null
    afterRead?: boolean | null
  }
  camelCaseTab: {
    array?:
      | {
          text?: string | null
          array?:
            | {
                text?: string | null
                id?: string | null
              }[]
            | null
          id?: string | null
        }[]
      | null
  }
  textarea?: string | null
  anotherText: string
  nestedTab: {
@@ -281,6 +281,31 @@ export default buildConfigWithDefaults({
        },
      ],
    },
    {
      slug: 'pages',
      fields: [
        {
          type: 'array',
          name: 'menu',
          fields: [
            {
              name: 'label',
              type: 'text',
            },
          ],
        },
      ],
    },
    {
      slug: 'rels-to-pages',
      fields: [
        {
          name: 'page',
          type: 'relationship',
          relationTo: 'pages',
        },
      ],
    },
  ],
  onInit: async (payload) => {
    await payload.create({
@@ -666,6 +666,37 @@ describe('Relationships', () => {
      expect(query.docs).toHaveLength(1)
      expect(query.docs[0].id).toStrictEqual(firstLevelID)
    })

    it('should allow querying within array nesting', async () => {
      const page = await payload.create({
        collection: 'pages',
        data: {
          menu: [
            {
              label: 'hello',
            },
          ],
        },
      })

      const rel = await payload.create({ collection: 'rels-to-pages', data: { page: page.id } })

      const resEquals = await payload.find({
        collection: 'rels-to-pages',
        where: { 'page.menu.label': { equals: 'hello' } },
      })

      expect(resEquals.totalDocs).toBe(1)
      expect(resEquals.docs[0].id).toBe(rel.id)

      const resIn = await payload.find({
        collection: 'rels-to-pages',
        where: { 'page.menu.label': { in: ['hello'] } },
      })

      expect(resIn.totalDocs).toBe(1)
      expect(resIn.docs[0].id).toBe(rel.id)
    })
  })

  describe('Nested Querying Separate Collections', () => {
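Editor's note: the relationship hunks above add `pages` and `rels-to-pages` collections and then query through the relationship into an array row. A compact sketch of that flow with the Local API, using only names that appear in those diffs (the surrounding setup is assumed):

```ts
// Sketch only — `payload` is assumed to be an initialized Local API instance,
// with `pages` and `rels-to-pages` configured as in the config diff above.
const page = await payload.create({
  collection: 'pages',
  data: { menu: [{ label: 'hello' }] },
})

const rel = await payload.create({
  collection: 'rels-to-pages',
  data: { page: page.id },
})

// The where key crosses the relationship (`page`), then the array (`menu`),
// then the row field (`label`).
const found = await payload.find({
  collection: 'rels-to-pages',
  where: { 'page.menu.label': { equals: 'hello' } },
})
// found.docs[0].id === rel.id
```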
@@ -1,4 +1,5 @@
/* tslint:disable */
/* eslint-disable */
/**
 * This file was automatically generated by Payload.
 * DO NOT MODIFY IT BY HAND. Instead, modify your source Payload config,

@@ -8,34 +9,102 @@
export interface Config {
  collections: {
    posts: Post
    relation: Relation
    postsLocalized: PostsLocalized
    relation: Relation
    'strict-access': StrictAccess
    'chained-relation': ChainedRelation
    'custom-id-relation': CustomIdRelation
    'custom-id-number-relation': CustomIdNumberRelation
    chained: Chained
    'custom-id': CustomId
    'custom-id-number': CustomIdNumber
    screenings: Screening
    movies: Movie
    directors: Director
    movieReviews: MovieReview
    'polymorphic-relationships': PolymorphicRelationship
    tree: Tree
    pages: Page
    'rels-to-pages': RelsToPage
    users: User
    'payload-preferences': PayloadPreference
    'payload-migrations': PayloadMigration
  }
  globals: {}
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "posts".
 */
export interface Post {
  id: string
  title?: string
  description?: string
  number?: number
  relationField?: string | Relation
  defaultAccessRelation?: string | StrictAccess
  chainedRelation?: string | ChainedRelation
  maxDepthRelation?: string | Relation
  customIdRelation?: string | CustomIdRelation
  customIdNumberRelation?: number | CustomIdNumberRelation
  filteredRelation?: string | Relation
  title?: string | null
  description?: string | null
  number?: number | null
  relationField?: (string | null) | Relation
  defaultAccessRelation?: (string | null) | StrictAccess
  chainedRelation?: (string | null) | Chained
  maxDepthRelation?: (string | null) | Relation
  customIdRelation?: (string | null) | CustomId
  customIdNumberRelation?: (number | null) | CustomIdNumber
  filteredRelation?: (string | null) | Relation
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "relation".
 */
export interface Relation {
  id: string
  name?: string | null
  disableRelation: boolean
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "strict-access".
 */
export interface StrictAccess {
  id: string
  name?: string | null
  disableRelation: boolean
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "chained".
 */
export interface Chained {
  id: string
  name?: string | null
  relation?: (string | null) | Chained
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "custom-id".
 */
export interface CustomId {
  id: string
  name?: string | null
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "custom-id-number".
 */
export interface CustomIdNumber {
  id: number
  name?: string | null
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "postsLocalized".
 */
export interface PostsLocalized {
  id: string
  title?: string | null

@@ -43,68 +112,152 @@ export interface PostsLocalized {
  updatedAt: string
  createdAt: string
}
export interface Relation {
  id: string
  name?: string
  disableRelation: boolean
  updatedAt: string
  createdAt: string
}
export interface StrictAccess {
  id: string
  name?: string
  disableRelation: boolean
  updatedAt: string
  createdAt: string
}
export interface ChainedRelation {
  id: string
  name?: string
  relation?: string | ChainedRelation
  updatedAt: string
  createdAt: string
}
export interface CustomIdRelation {
  id: string
  name?: string
  updatedAt: string
  createdAt: string
}
export interface CustomIdNumberRelation {
  id: number
  name?: string
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "screenings".
 */
export interface Screening {
  id: string
  name?: string
  movie?: string | Movie
  name?: string | null
  movie?: (string | null) | Movie
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "movies".
 */
export interface Movie {
  id: string
  name?: string
  director?: string | Director
  name?: string | null
  director?: (string | null) | Director
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "directors".
 */
export interface Director {
  id: string
  name?: string
  movies?: Array<string | Movie>
  name?: string | null
  movies?: (string | Movie)[] | null
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "movieReviews".
 */
export interface MovieReview {
  id: string
  movieReviewer: string | User
  likes?: (string | User)[] | null
  visibility: 'followers' | 'public'
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "users".
 */
export interface User {
  id: string
  updatedAt: string
  createdAt: string
  email?: string
  resetPasswordToken?: string
  resetPasswordExpiration?: string
  loginAttempts?: number
  lockUntil?: string
  password?: string
  email: string
  resetPasswordToken?: string | null
  resetPasswordExpiration?: string | null
  salt?: string | null
  hash?: string | null
  loginAttempts?: number | null
  lockUntil?: string | null
  password: string | null
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "polymorphic-relationships".
 */
export interface PolymorphicRelationship {
  id: string
  polymorphic?: {
    relationTo: 'movies'
    value: string | Movie
  } | null
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "tree".
 */
export interface Tree {
  id: string
  text?: string | null
  parent?: (string | null) | Tree
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "pages".
 */
export interface Page {
  id: string
  menu?:
    | {
        label?: string | null
        id?: string | null
      }[]
    | null
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "rels-to-pages".
 */
export interface RelsToPage {
  id: string
  page?: (string | null) | Page
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "payload-preferences".
 */
export interface PayloadPreference {
  id: string
  user: {
    relationTo: 'users'
    value: string | User
  }
  key?: string | null
  value?:
    | {
        [k: string]: unknown
      }
    | unknown[]
    | string
    | number
    | boolean
    | null
  updatedAt: string
  createdAt: string
}
/**
 * This interface was referenced by `Config`'s JSON-Schema
 * via the `definition` "payload-migrations".
 */
export interface PayloadMigration {
  id: string
  name?: string | null
  batch?: number | null
  updatedAt: string
  createdAt: string
}

declare module 'payload' {
  export interface GeneratedTypes extends Config {}
}
@@ -11,6 +11,7 @@ import {
  animatedTypeMedia,
  audioSlug,
  cropOnlySlug,
  customFileNameMediaSlug,
  enlargeSlug,
  focalOnlySlug,
  globalWithMedia,

@@ -203,6 +204,23 @@ export default buildConfigWithDefaults({
        },
      },
    },
    {
      slug: customFileNameMediaSlug,
      fields: [],
      upload: {
        imageSizes: [
          {
            name: 'custom',
            height: 500,
            width: 500,
            generateImageName: ({ extension, height, width, sizeName }) =>
              `${sizeName}-${width}x${height}.${extension}`,
          },
        ],
        mimeTypes: ['image/png', 'image/jpg', 'image/jpeg'],
        staticDir: `./${customFileNameMediaSlug}`,
      },
    },
    {
      slug: cropOnlySlug,
      fields: [],
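Editor's note: the upload hunk above introduces a per-size `generateImageName` option. Below is a minimal sketch of how such a collection might look, reusing the callback argument shape from that diff (`{ extension, height, width, sizeName }`); the slug and size name here are illustrative only.

```ts
// Sketch only — not the repository's exact config; the callback signature is
// taken from the `customFileNameMediaSlug` collection in the diff above.
const RenamedSizeMedia = {
  slug: 'renamed-size-media', // hypothetical slug
  fields: [],
  upload: {
    imageSizes: [
      {
        name: 'thumb',
        width: 500,
        height: 500,
        // For a PNG upload this would yield `thumb-500x500.png`.
        generateImageName: ({ extension, height, width, sizeName }) =>
          `${sizeName}-${width}x${height}.${extension}`,
      },
    ],
    staticDir: './renamed-size-media',
  },
}
```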
@@ -24,6 +24,7 @@ import {
  withMetadataSlug,
  withOnlyJPEGMetadataSlug,
  withoutMetadataSlug,
  customFileNameMediaSlug,
} from './shared'

const { beforeAll, describe } = test

@@ -40,6 +41,7 @@ let withMetadataURL: AdminUrlUtil
let withoutMetadataURL: AdminUrlUtil
let withOnlyJPEGMetadataURL: AdminUrlUtil
let relationPreviewURL: AdminUrlUtil
let customFileNameURL: AdminUrlUtil

describe('uploads', () => {
  let page: Page

@@ -62,6 +64,7 @@ describe('uploads', () => {
    withoutMetadataURL = new AdminUrlUtil(serverURL, withoutMetadataSlug)
    withOnlyJPEGMetadataURL = new AdminUrlUtil(serverURL, withOnlyJPEGMetadataSlug)
    relationPreviewURL = new AdminUrlUtil(serverURL, relationPreviewSlug)
    customFileNameURL = new AdminUrlUtil(serverURL, customFileNameMediaSlug)

    const context = await browser.newContext()
    page = await context.newPage()

@@ -427,6 +430,25 @@ describe('uploads', () => {
    expect(webpMediaDoc.sizes.sizeThree.filesize).toEqual(211638)
  })

  test('should have custom file name for image size', async () => {
    await page.goto(customFileNameURL.create)
    await page.setInputFiles('input[type="file"]', path.resolve(__dirname, './image.png'))

    await expect(page.locator('.file-field__upload .thumbnail img')).toBeVisible()

    await saveDocAndAssert(page)

    await expect(page.locator('.file-details img')).toBeVisible()

    await page.locator('.file-field__previewSizes').click()

    const renamedImageSizeFile = page
      .locator('.preview-sizes__list .preview-sizes__sizeOption')
      .nth(1)

    await expect(renamedImageSizeFile).toContainText('custom-500x500.png')
  })

  describe('image manipulation', () => {
    test('should crop image correctly', async () => {
      const positions = {
@@ -15,3 +15,4 @@ export const animatedTypeMedia = 'animated-type-media'
export const withMetadataSlug = 'with-meta-data'
export const withoutMetadataSlug = 'without-meta-data'
export const withOnlyJPEGMetadataSlug = 'with-only-jpeg-meta-data'
export const customFileNameMediaSlug = 'custom-file-name-media'