feat(storage-*): large file uploads on Vercel (#11382)

Currently, Payload on Vercel has a limitation: server-side uploads are
limited to a 4.5MB file size.
This PR allows you to pass `clientUploads: true` to all existing storage
adapters:
* Storage S3
* Vercel Blob
* Google Cloud Storage
* Uploadthing
* Azure Blob

Payload will then perform uploads on the client instead. The S3 adapter uses
signed URLs (Azure and GCS work the same way), and Vercel Blob follows this
guide:
https://vercel.com/guides/how-to-bypass-vercel-body-size-limit-serverless-functions#step-2:-create-a-client-upload-route.
Note that this does not mean anyone can now upload files to your storage:
auth checks are still performed, and you can customize them with
`clientUploads.access`.
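
For example, a minimal config sketch (the `media` slug, bucket name, and env
var names below are placeholders, not part of this PR):

```ts
// Sketch: enabling client-side uploads with the S3 adapter.
import { s3Storage } from '@payloadcms/storage-s3'
import { buildConfig } from 'payload'

export default buildConfig({
  // ...the rest of your config
  plugins: [
    s3Storage({
      bucket: process.env.S3_BUCKET,
      collections: {
        media: true,
      },
      config: {
        credentials: {
          accessKeyId: process.env.S3_ACCESS_KEY_ID,
          secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
        },
        region: process.env.S3_REGION,
      },
      // `true` enables client uploads with the default "logged-in user" check;
      // pass an object to customize the auth check instead.
      clientUploads: {
        access: ({ req }) => Boolean(req.user),
      },
    }),
  ],
})
```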


https://github.com/user-attachments/assets/5083c76c-8f5a-43dc-a88c-9ddc4527d91c

Implements the feature request from
https://github.com/payloadcms/payload/discussions/7569.
Sasha
2025-02-26 21:59:34 +02:00
committed by GitHub
parent c6ab312286
commit b540da53ec
54 changed files with 1548 additions and 152 deletions

View File

@@ -30,6 +30,7 @@ pnpm add @payloadcms/storage-vercel-blob
- Configure the `collections` object to specify which collections should use the Vercel Blob adapter. The slug _must_ match one of your existing collection slugs.
- Ensure you have `BLOB_READ_WRITE_TOKEN` set in your Vercel environment variables. This is usually set by Vercel automatically after adding blob storage to your project.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client.
```ts
import { vercelBlobStorage } from '@payloadcms/storage-vercel-blob'
@@ -64,6 +65,7 @@ export default buildConfig({
| `addRandomSuffix` | Add a random suffix to the uploaded file name in Vercel Blob storage | `false` |
| `cacheControlMaxAge` | Cache-Control max-age in seconds | `365 * 24 * 60 * 60` (1 Year) |
| `token` | Vercel Blob storage read/write token | `''` |
| `clientUploads` | Do uploads directly on the client to bypass limits on Vercel. | |
## S3 Storage
[`@payloadcms/storage-s3`](https://www.npmjs.com/package/@payloadcms/storage-s3)
@@ -79,6 +81,7 @@ pnpm add @payloadcms/storage-s3
- Configure the `collections` object to specify which collections should use the S3 Storage adapter. The slug _must_ match one of your existing collection slugs.
- The `config` object can be any [`S3ClientConfig`](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3) object (from [`@aws-sdk/client-s3`](https://github.com/aws/aws-sdk-js-v3)). _This is highly dependent on your AWS setup_. Check the AWS documentation for more information.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow the CORS `PUT` method on the bucket for your website's origin.
```ts
import { s3Storage } from '@payloadcms/storage-s3'
@@ -126,6 +129,7 @@ pnpm add @payloadcms/storage-azure
- Configure the `collections` object to specify which collections should use the Azure Blob adapter. The slug _must_ match one of your existing collection slugs.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow the CORS `PUT` method for your website's origin.
```ts
import { azureStorage } from '@payloadcms/storage-azure'
@@ -161,6 +165,7 @@ export default buildConfig({
| `baseURL` | Base URL for the Azure Blob storage account | |
| `connectionString` | Azure Blob storage connection string | |
| `containerName` | Azure Blob storage container name | |
| `clientUploads` | Do uploads directly on the client to bypass limits on Vercel. | |
## Google Cloud Storage
[`@payloadcms/storage-gcs`](https://www.npmjs.com/package/@payloadcms/storage-gcs)
@@ -175,6 +180,7 @@ pnpm add @payloadcms/storage-gcs
- Configure the `collections` object to specify which collections should use the Google Cloud Storage adapter. The slug _must_ match one of your existing collection slugs.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow the CORS `PUT` method on the bucket for your website's origin.
```ts
import { gcsStorage } from '@payloadcms/storage-gcs'
@@ -204,12 +210,13 @@ export default buildConfig({
### Configuration Options#gcs-configuration
| Option | Description | Default |
| ------------- | --------------------------------------------------------------------------------------------------- | --------- |
| --------------- | --------------------------------------------------------------------------------------------------- | --------- |
| `enabled` | Whether or not to enable the plugin | `true` |
| `collections` | Collections to apply the storage to | |
| `bucket` | The name of the bucket to use | |
| `options` | Google Cloud Storage client configuration. See [Docs](https://github.com/googleapis/nodejs-storage) | |
| `acl` | Access control list for files that are uploaded | `Private` |
| `clientUploads` | Do uploads directly on the client to bypass limits on Vercel. | |
## Uploadthing Storage
@@ -226,6 +233,7 @@ pnpm add @payloadcms/storage-uploadthing
- Configure the `collections` object to specify which collections should use uploadthing. The slug _must_ match one of your existing collection slugs and be an `upload` type.
- Get a token from Uploadthing and set it as `token` in the `options` object.
- `acl` is optional and defaults to `public-read`.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client.
```ts
export default buildConfig({
@@ -247,12 +255,13 @@ export default buildConfig({
### Configuration Options#uploadthing-configuration
| Option | Description | Default |
| ---------------- | ----------------------------------------------- | ------------- |
| ---------------- | ------------------------------------------------------------- | ------------- |
| `token` | Token from Uploadthing. Required. | |
| `acl` | Access control list for files that are uploaded | `public-read` |
| `logLevel` | Log level for Uploadthing | `info` |
| `fetch` | Custom fetch function | `fetch` |
| `defaultKeyType` | Default key type for file operations | `fileKey` |
| `clientUploads` | Do uploads directly on the client to bypass limits on Vercel. | |
## Custom Storage Adapters

View File

@@ -178,7 +178,7 @@ export type UploadConfig = {
req: PayloadRequest,
args: {
doc: TypeWithID
params: { collection: string; filename: string }
params: { clientUploadContext?: unknown; collection: string; filename: string }
},
) => Promise<Response> | Promise<void> | Response | void)[]
/**

View File

@@ -44,6 +44,54 @@ export const addDataAndFileToRequest: AddDataAndFileToRequest = async (req) => {
if (fields?._payload && typeof fields._payload === 'string') {
req.data = JSON.parse(fields._payload)
}
if (!req.file && fields?.file && typeof fields?.file === 'string') {
const { clientUploadContext, collectionSlug, filename, mimeType, size } = JSON.parse(
fields.file,
)
const uploadConfig = req.payload.collections[collectionSlug].config.upload
if (!uploadConfig.handlers) {
throw new APIError('uploadConfig.handlers is not present for ' + collectionSlug)
}
let response: null | Response = null
let error: unknown
for (const handler of uploadConfig.handlers) {
try {
const result = await handler(req, {
doc: null,
params: {
clientUploadContext, // Adapter-specific context returned from the client upload handler, so the staticHandler can use it later.
collection: collectionSlug,
filename,
},
})
if (result) {
response = result
}
// If we couldn't get the file from this handler, save the error and try the others.
} catch (err) {
error = err
}
}
if (!response) {
if (error) {
payload.logger.error(error)
}
throw new APIError('Expected response from the upload handler.')
}
req.file = {
name: filename,
data: Buffer.from(await response.arrayBuffer()),
mimetype: response.headers.get('Content-Type') || mimeType,
size,
}
}
}
}
}
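
For reference, this handler runs when the admin UI has already uploaded the
file on the client: instead of raw bytes, the `file` form field then carries a
JSON string describing the upload (mirroring the `createFormData` changes later
in this PR). A rough sketch of that body, with illustrative values:

```ts
// Sketch of the multipart body the admin UI sends after a client-side upload.
const body = new FormData()
body.append('_payload', JSON.stringify({ alt: 'My image' }))
body.append(
  'file',
  JSON.stringify({
    clientUploadContext: { key: 'abc123' }, // whatever the adapter's client handler returned
    collectionSlug: 'media',
    filename: 'photo.png',
    mimeType: 'image/png',
    size: 123456,
  }),
)
```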

View File

@@ -7,6 +7,15 @@
"syntax": "typescript",
"tsx": true,
"dts": true
},
"transform": {
"react": {
"runtime": "automatic",
"pragmaFrag": "React.Fragment",
"throwIfNamespace": true,
"development": false,
"useBuiltins": true
}
}
},
"module": {

View File

@@ -33,6 +33,11 @@
"import": "./src/exports/utilities.ts",
"types": "./src/exports/utilities.ts",
"default": "./src/exports/utilities.ts"
},
"./client": {
"import": "./src/exports/client.ts",
"types": "./src/exports/client.ts",
"default": "./src/exports/client.ts"
}
},
"main": "./src/index.ts",
@@ -53,15 +58,20 @@
"test": "echo \"No tests available.\""
},
"dependencies": {
"@payloadcms/ui": "workspace:*",
"find-node-modules": "^2.1.3",
"range-parser": "^1.2.1"
},
"devDependencies": {
"@types/find-node-modules": "^2.1.2",
"@types/react": "19.0.1",
"@types/react-dom": "19.0.1",
"payload": "workspace:*"
},
"peerDependencies": {
"payload": "workspace:*"
"payload": "workspace:*",
"react": "^19.0.0 || ^19.0.0-rc-65a56d0e-20241020",
"react-dom": "^19.0.0 || ^19.0.0-rc-65a56d0e-20241020"
},
"publishConfig": {
"exports": {
@@ -79,6 +89,11 @@
"import": "./dist/exports/utilities.js",
"types": "./dist/exports/utilities.d.ts",
"default": "./dist/exports/utilities.js"
},
"./client": {
"import": "./dist/exports/client.js",
"types": "./dist/exports/client.d.ts",
"default": "./dist/exports/client.js"
}
},
"main": "./dist/index.js",

View File

@@ -0,0 +1,69 @@
'use client'
import type { UploadCollectionSlug } from 'payload'
import { useConfig, useEffectEvent, useUploadHandlers } from '@payloadcms/ui'
import { Fragment, type ReactNode, useEffect } from 'react'
type ClientUploadHandlerProps<T extends Record<string, unknown>> = {
children: ReactNode
collectionSlug: UploadCollectionSlug
enabled?: boolean
extra: T
serverHandlerPath: string
}
export const createClientUploadHandler = <T extends Record<string, unknown>>({
handler,
}: {
handler: (args: {
apiRoute: string
collectionSlug: UploadCollectionSlug
extra: T
file: File
serverHandlerPath: string
serverURL: string
updateFilename: (value: string) => void
}) => Promise<unknown>
}) => {
return function ClientUploadHandler({
children,
collectionSlug,
enabled,
extra,
serverHandlerPath,
}: ClientUploadHandlerProps<T>) {
const { setUploadHandler } = useUploadHandlers()
const {
config: {
routes: { api: apiRoute },
serverURL,
},
} = useConfig()
const initializeHandler = useEffectEvent(() => {
if (enabled) {
setUploadHandler({
collectionSlug,
handler: ({ file, updateFilename }) => {
return handler({
apiRoute,
collectionSlug,
extra,
file,
serverHandlerPath,
serverURL,
updateFilename,
})
},
})
}
})
useEffect(() => {
initializeHandler()
}, [])
return <Fragment>{children}</Fragment>
}
}

View File

@@ -0,0 +1 @@
export { createClientUploadHandler } from '../client/createClientUploadHandler.js'

View File

@@ -1 +1,2 @@
export { getFilePrefix } from '../utilities/getFilePrefix.js'
export { initClientUploads } from '../utilities/initClientUploads.js'

View File

@@ -16,6 +16,17 @@ export interface File {
tempFilePath?: string
}
export type ClientUploadsAccess = (args: {
collectionSlug: UploadCollectionSlug
req: PayloadRequest
}) => boolean | Promise<boolean>
export type ClientUploadsConfig =
| {
access?: ClientUploadsAccess
}
| boolean
export type HandleUpload = (args: {
collection: CollectionConfig
data: any
@@ -43,7 +54,10 @@ export type GenerateURL = (args: {
export type StaticHandler = (
req: PayloadRequest,
args: { doc?: TypeWithID; params: { collection: string; filename: string } },
args: {
doc?: TypeWithID
params: { clientUploadContext?: unknown; collection: string; filename: string }
},
) => Promise<Response> | Response
export interface GeneratedAdapter {

View File

@@ -0,0 +1,76 @@
import type { Config, PayloadHandler } from 'payload'
export const initClientUploads = <ExtraProps extends Record<string, unknown>, T>({
clientHandler,
collections,
config,
enabled,
extraClientHandlerProps,
serverHandler,
serverHandlerPath,
}: {
/** Path to clientHandler component */
clientHandler: string
collections: Record<string, T>
config: Config
enabled: boolean
/** extra props to pass to the client handler */
extraClientHandlerProps?: (collection: T) => ExtraProps
serverHandler: PayloadHandler
serverHandlerPath: string
}) => {
if (enabled) {
if (!config.endpoints) {
config.endpoints = []
}
/**
* Tracks how many times the same handler was already applied.
* This allows the same plugin to be applied multiple times, for example
* to use different buckets for different collections.
*/
let handlerCount = 0
for (const endpoint of config.endpoints) {
if (endpoint.path === serverHandlerPath) {
handlerCount++
}
}
if (handlerCount) {
serverHandlerPath = `${serverHandlerPath}-${handlerCount}`
}
config.endpoints.push({
handler: serverHandler,
method: 'post',
path: serverHandlerPath,
})
}
if (!config.admin) {
config.admin = {}
}
if (!config.admin.components) {
config.admin.components = {}
}
if (!config.admin.components.providers) {
config.admin.components.providers = []
}
for (const collectionSlug in collections) {
const collection = collections[collectionSlug]
config.admin.components.providers.push({
clientProps: {
collectionSlug,
enabled,
extra: extraClientHandlerProps ? extraClientHandlerProps(collection) : undefined,
serverHandlerPath,
},
path: clientHandler,
})
}
}

View File

@@ -5,5 +5,5 @@
"strict": false,
"noUncheckedIndexedAccess": false,
},
"references": [{ "path": "../payload" }]
"references": [{ "path": "../payload" }, { "path": "../ui" }]
}

View File

@@ -14,6 +14,7 @@ pnpm add @payloadcms/storage-azure
- Configure the `collections` object to specify which collections should use the Azure Blob Storage adapter. The slug _must_ match one of your existing collection slugs.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow the CORS `PUT` method for your website's origin.
```ts
import { azureStorage } from '@payloadcms/storage-azure'
@@ -49,3 +50,4 @@ export default buildConfig({
| `baseURL` | Base URL for the Azure Blob storage account | |
| `connectionString` | Azure Blob storage connection string | |
| `containerName` | Azure Blob storage container name | |
| `clientUploads` | Do uploads directly on the client to bypass limits on Vercel. | |

View File

@@ -23,6 +23,11 @@
"import": "./src/index.ts",
"types": "./src/index.ts",
"default": "./src/index.ts"
},
"./client": {
"import": "./src/exports/client.ts",
"types": "./src/exports/client.ts",
"default": "./src/exports/client.ts"
}
},
"main": "./src/index.ts",
@@ -62,6 +67,11 @@
"import": "./dist/index.js",
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./client": {
"import": "./dist/exports/client.js",
"types": "./dist/exports/client.d.ts",
"default": "./dist/exports/client.js"
}
},
"main": "./dist/index.js",

View File

@@ -0,0 +1,29 @@
'use client'
import { createClientUploadHandler } from '@payloadcms/plugin-cloud-storage/client'
export const AzureClientUploadHandler = createClientUploadHandler({
handler: async ({ apiRoute, collectionSlug, file, serverHandlerPath, serverURL }) => {
const response = await fetch(`${serverURL}${apiRoute}${serverHandlerPath}`, {
body: JSON.stringify({
collectionSlug,
filename: file.name,
mimeType: file.type,
}),
credentials: 'include',
method: 'POST',
})
const { url } = await response.json()
await fetch(url, {
body: file,
headers: {
'Content-Length': file.size.toString(),
'Content-Type': file.type,
// Required for azure
'x-ms-blob-type': 'BlockBlob',
},
method: 'PUT',
})
},
})

View File

@@ -0,0 +1 @@
export { AzureClientUploadHandler } from '../client/AzureClientUploadHandler.js'

View File

@@ -0,0 +1,62 @@
import type { ContainerClient, StorageSharedKeyCredential } from '@azure/storage-blob'
import type { ClientUploadsAccess } from '@payloadcms/plugin-cloud-storage/types'
import type { PayloadHandler } from 'payload'
import { BlobSASPermissions, generateBlobSASQueryParameters } from '@azure/storage-blob'
import path from 'path'
import { APIError, Forbidden } from 'payload'
import type { AzureStorageOptions } from './index.js'
interface Args {
access?: ClientUploadsAccess
collections: AzureStorageOptions['collections']
containerName: string
getStorageClient: () => ContainerClient
}
const defaultAccess: Args['access'] = ({ req }) => !!req.user
export const getGenerateSignedURLHandler = ({
access = defaultAccess,
collections,
containerName,
getStorageClient,
}: Args): PayloadHandler => {
return async (req) => {
if (!req.json) {
throw new APIError('Unreachable')
}
const { collectionSlug, filename, mimeType } = await req.json()
const collectionS3Config = collections[collectionSlug]
if (!collectionS3Config) {
throw new APIError(`Collection ${collectionSlug} was not found in Azure storage options`)
}
const prefix = (typeof collectionS3Config === 'object' && collectionS3Config.prefix) || ''
if (!(await access({ collectionSlug, req }))) {
throw new Forbidden()
}
const fileKey = path.posix.join(prefix, filename)
const blobClient = getStorageClient().getBlobClient(fileKey)
const sasToken = generateBlobSASQueryParameters(
{
blobName: fileKey,
containerName,
contentType: mimeType,
expiresOn: new Date(Date.now() + 30 * 60 * 1000),
permissions: BlobSASPermissions.parse('w'),
startsOn: new Date(),
},
getStorageClient().credential as StorageSharedKeyCredential,
)
return Response.json({ url: `${blobClient.url}?${sasToken.toString()}` })
}
}

View File

@@ -1,5 +1,7 @@
import type { ContainerClient } from '@azure/storage-blob'
import type {
Adapter,
ClientUploadsConfig,
PluginOptions as CloudStoragePluginOptions,
CollectionOptions,
GeneratedAdapter,
@@ -7,7 +9,9 @@ import type {
import type { Config, Plugin, UploadCollectionSlug } from 'payload'
import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities'
import { getGenerateSignedURLHandler } from './generateSignedURL.js'
import { getGenerateURL } from './generateURL.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
@@ -27,6 +31,11 @@ export type AzureStorageOptions = {
*/
baseURL: string
/**
* Do uploads directly on the client to bypass limits on Vercel. You must allow the CORS PUT method for your website's origin.
*/
clientUploads?: ClientUploadsConfig
/**
* Collection options to apply the Azure Blob adapter to.
*/
@@ -59,7 +68,30 @@ export const azureStorage: AzureStoragePlugin =
return incomingConfig
}
const adapter = azureStorageInternal(azureStorageOptions)
const getStorageClient = () =>
getStorageClientFunc({
connectionString: azureStorageOptions.connectionString,
containerName: azureStorageOptions.containerName,
})
initClientUploads({
clientHandler: '@payloadcms/storage-azure/client#AzureClientUploadHandler',
collections: azureStorageOptions.collections,
config: incomingConfig,
enabled: !!azureStorageOptions.clientUploads,
serverHandler: getGenerateSignedURLHandler({
access:
typeof azureStorageOptions.clientUploads === 'object'
? azureStorageOptions.clientUploads.access
: undefined,
collections: azureStorageOptions.collections,
containerName: azureStorageOptions.containerName,
getStorageClient,
}),
serverHandlerPath: '/storage-azure-generate-signed-url',
})
const adapter = azureStorageInternal(getStorageClient, azureStorageOptions)
// Add adapter to each collection option object
const collectionsWithAdapter: CloudStoragePluginOptions['collections'] = Object.entries(
@@ -98,20 +130,16 @@ export const azureStorage: AzureStoragePlugin =
})(config)
}
function azureStorageInternal({
allowContainerCreate,
baseURL,
connectionString,
containerName,
}: AzureStorageOptions): Adapter {
function azureStorageInternal(
getStorageClient: () => ContainerClient,
{ allowContainerCreate, baseURL, connectionString, containerName }: AzureStorageOptions,
): Adapter {
const createContainerIfNotExists = () => {
void getStorageClientFunc({ connectionString, containerName }).createIfNotExists({
access: 'blob',
})
}
const getStorageClient = () => getStorageClientFunc({ connectionString, containerName })
return ({ collection, prefix }): GeneratedAdapter => {
return {
name: 'azure',

View File

@@ -14,6 +14,7 @@ pnpm add @payloadcms/storage-gcs
- Configure the `collections` object to specify which collections should use the Google Cloud Storage adapter. The slug _must_ match one of your existing collection slugs.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow the CORS `PUT` method on the bucket for your website's origin.
```ts
import { gcsStorage } from '@payloadcms/storage-gcs'

View File

@@ -23,6 +23,11 @@
"import": "./src/index.ts",
"types": "./src/index.ts",
"default": "./src/index.ts"
},
"./client": {
"import": "./src/exports/client.ts",
"types": "./src/exports/client.ts",
"default": "./src/exports/client.ts"
}
},
"main": "./src/index.ts",
@@ -59,6 +64,11 @@
"import": "./dist/index.js",
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./client": {
"import": "./dist/exports/client.js",
"types": "./dist/exports/client.d.ts",
"default": "./dist/exports/client.js"
}
},
"main": "./dist/index.js",

View File

@@ -0,0 +1,24 @@
'use client'
import { createClientUploadHandler } from '@payloadcms/plugin-cloud-storage/client'
export const GcsClientUploadHandler = createClientUploadHandler({
handler: async ({ apiRoute, collectionSlug, file, serverHandlerPath, serverURL }) => {
const response = await fetch(`${serverURL}${apiRoute}${serverHandlerPath}`, {
body: JSON.stringify({
collectionSlug,
filename: file.name,
mimeType: file.type,
}),
credentials: 'include',
method: 'POST',
})
const { url } = await response.json()
await fetch(url, {
body: file,
headers: { 'Content-Length': file.size.toString(), 'Content-Type': file.type },
method: 'PUT',
})
},
})

View File

@@ -0,0 +1 @@
export { GcsClientUploadHandler } from '../client/GcsClientUploadHandler.js'

View File

@@ -0,0 +1,58 @@
import type { Storage } from '@google-cloud/storage'
import type { ClientUploadsAccess } from '@payloadcms/plugin-cloud-storage/types'
import type { PayloadHandler } from 'payload'
import path from 'path'
import { APIError, Forbidden } from 'payload'
import type { GcsStorageOptions } from './index.js'
interface Args {
access?: ClientUploadsAccess
acl?: 'private' | 'public-read'
bucket: string
collections: GcsStorageOptions['collections']
getStorageClient: () => Storage
}
const defaultAccess: Args['access'] = ({ req }) => !!req.user
export const getGenerateSignedURLHandler = ({
access = defaultAccess,
bucket,
collections,
getStorageClient,
}: Args): PayloadHandler => {
return async (req) => {
if (!req.json) {
throw new APIError('Unreachable')
}
const { collectionSlug, filename, mimeType } = await req.json()
const collectionS3Config = collections[collectionSlug]
if (!collectionS3Config) {
throw new APIError(`Collection ${collectionSlug} was not found in GCS storage options`)
}
const prefix = (typeof collectionS3Config === 'object' && collectionS3Config.prefix) || ''
if (!(await access({ collectionSlug, req }))) {
throw new Forbidden()
}
const fileKey = path.posix.join(prefix, filename)
const [url] = await getStorageClient()
.bucket(bucket)
.file(fileKey)
.getSignedUrl({
action: 'write',
contentType: mimeType,
expires: Date.now() + 60 * 60 * 5,
version: 'v4',
})
return Response.json({ url })
}
}

View File

@@ -1,6 +1,7 @@
import type { StorageOptions } from '@google-cloud/storage'
import type {
Adapter,
ClientUploadsConfig,
PluginOptions as CloudStoragePluginOptions,
CollectionOptions,
GeneratedAdapter,
@@ -10,6 +11,7 @@ import type { Config, Plugin, UploadCollectionSlug } from 'payload'
import { Storage } from '@google-cloud/storage'
import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { getGenerateSignedURLHandler } from './generateSignedURL.js'
import { getGenerateURL } from './generateURL.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
@@ -22,6 +24,10 @@ export interface GcsStorageOptions {
* The name of the bucket to use.
*/
bucket: string
/**
* Do uploads directly on the client to bypass limits on Vercel. You must allow the CORS PUT method on the bucket for your website's origin.
*/
clientUploads?: ClientUploadsConfig
/**
* Collection options to apply the S3 adapter to.
*/
@@ -50,7 +56,60 @@ export const gcsStorage: GcsStoragePlugin =
return incomingConfig
}
const adapter = gcsStorageInternal(gcsStorageOptions)
let storageClient: null | Storage = null
const getStorageClient = (): Storage => {
if (storageClient) {
return storageClient
}
storageClient = new Storage(gcsStorageOptions.options)
return storageClient
}
const adapter = gcsStorageInternal(getStorageClient, gcsStorageOptions)
if (gcsStorageOptions.clientUploads) {
if (!incomingConfig.endpoints) {
incomingConfig.endpoints = []
}
incomingConfig.endpoints.push({
handler: getGenerateSignedURLHandler({
access:
typeof gcsStorageOptions.clientUploads === 'object'
? gcsStorageOptions.clientUploads.access
: undefined,
bucket: gcsStorageOptions.bucket,
collections: gcsStorageOptions.collections,
getStorageClient,
}),
method: 'post',
path: '/storage-gcs-generate-signed-url',
})
}
if (!incomingConfig.admin) {
incomingConfig.admin = {}
}
if (!incomingConfig.admin.components) {
incomingConfig.admin.components = {}
}
if (!incomingConfig.admin.components.providers) {
incomingConfig.admin.components.providers = []
}
for (const collectionSlug in gcsStorageOptions.collections) {
incomingConfig.admin.components.providers.push({
clientProps: {
collectionSlug,
enabled: !!gcsStorageOptions.clientUploads,
},
path: '@payloadcms/storage-gcs/client#GcsClientUploadHandler',
})
}
// Add adapter to each collection option object
const collectionsWithAdapter: CloudStoragePluginOptions['collections'] = Object.entries(
@@ -89,18 +148,11 @@ export const gcsStorage: GcsStoragePlugin =
})(config)
}
function gcsStorageInternal({ acl, bucket, options }: GcsStorageOptions): Adapter {
function gcsStorageInternal(
getStorageClient: () => Storage,
{ acl, bucket }: GcsStorageOptions,
): Adapter {
return ({ collection, prefix }): GeneratedAdapter => {
let storageClient: null | Storage = null
const getStorageClient = (): Storage => {
if (storageClient) {
return storageClient
}
storageClient = new Storage(options)
return storageClient
}
return {
name: 'gcs',
generateURL: getGenerateURL({ bucket, getStorageClient }),

View File

@@ -15,6 +15,7 @@ pnpm add @payloadcms/storage-s3
- Configure the `collections` object to specify which collections should use the AWS S3 adapter. The slug _must_ match one of your existing collection slugs.
- The `config` object can be any [`S3ClientConfig`](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3) object (from [`@aws-sdk/client-s3`](https://github.com/aws/aws-sdk-js-v3)). _This is highly dependent on your AWS setup_. Check the AWS documentation for more information.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow the CORS `PUT` method on the bucket for your website's origin.
```ts
import { s3Storage } from '@payloadcms/storage-s3'

View File

@@ -23,6 +23,11 @@
"import": "./src/index.ts",
"types": "./src/index.ts",
"default": "./src/index.ts"
},
"./client": {
"import": "./src/exports/client.ts",
"types": "./src/exports/client.ts",
"default": "./src/exports/client.ts"
}
},
"main": "./src/index.ts",
@@ -43,6 +48,7 @@
"dependencies": {
"@aws-sdk/client-s3": "^3.614.0",
"@aws-sdk/lib-storage": "^3.614.0",
"@aws-sdk/s3-request-presigner": "^3.614.0",
"@payloadcms/plugin-cloud-storage": "workspace:*"
},
"devDependencies": {
@@ -60,6 +66,11 @@
"import": "./dist/index.js",
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./client": {
"import": "./dist/exports/client.js",
"types": "./dist/exports/client.d.ts",
"default": "./dist/exports/client.js"
}
},
"main": "./dist/index.js",

View File

@@ -0,0 +1,24 @@
'use client'
import { createClientUploadHandler } from '@payloadcms/plugin-cloud-storage/client'
export const S3ClientUploadHandler = createClientUploadHandler({
handler: async ({ apiRoute, collectionSlug, file, serverHandlerPath, serverURL }) => {
const response = await fetch(`${serverURL}${apiRoute}${serverHandlerPath}`, {
body: JSON.stringify({
collectionSlug,
filename: file.name,
mimeType: file.type,
}),
credentials: 'include',
method: 'POST',
})
const { url } = await response.json()
await fetch(url, {
body: file,
headers: { 'Content-Length': file.size.toString(), 'Content-Type': file.type },
method: 'PUT',
})
},
})

View File

@@ -0,0 +1 @@
export { S3ClientUploadHandler } from '../client/S3ClientUploadHandler.js'

View File

@@ -0,0 +1,59 @@
import type { ClientUploadsAccess } from '@payloadcms/plugin-cloud-storage/types'
import type { PayloadHandler } from 'payload'
import * as AWS from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
import path from 'path'
import { APIError, Forbidden } from 'payload'
import type { S3StorageOptions } from './index.js'
interface Args {
access?: ClientUploadsAccess
acl?: 'private' | 'public-read'
bucket: string
collections: S3StorageOptions['collections']
getStorageClient: () => AWS.S3
}
const defaultAccess: Args['access'] = ({ req }) => !!req.user
export const getGenerateSignedURLHandler = ({
access = defaultAccess,
acl,
bucket,
collections,
getStorageClient,
}: Args): PayloadHandler => {
return async (req) => {
if (!req.json) {
throw new APIError('Content-Type expected to be application/json', 400)
}
const { collectionSlug, filename, mimeType } = await req.json()
const collectionS3Config = collections[collectionSlug]
if (!collectionS3Config) {
throw new APIError(`Collection ${collectionSlug} was not found in S3 options`)
}
const prefix = (typeof collectionS3Config === 'object' && collectionS3Config.prefix) || ''
if (!(await access({ collectionSlug, req }))) {
throw new Forbidden()
}
const fileKey = path.posix.join(prefix, filename)
const url = await getSignedUrl(
// @ts-expect-error type mismatch between AWS SDK client and presigner versions
getStorageClient(),
new AWS.PutObjectCommand({ ACL: acl, Bucket: bucket, ContentType: mimeType, Key: fileKey }),
{
expiresIn: 600,
},
)
return Response.json({ url })
}
}
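
Because the browser PUTs the file straight to the presigned URL returned here,
the bucket must allow cross-origin `PUT` requests from the admin panel's
origin. A hedged sketch of applying such a rule with the AWS SDK (bucket name,
region, and origin are placeholders):

```ts
// Sketch: allow the admin panel's origin to PUT directly to the bucket.
import { PutBucketCorsCommand, S3 } from '@aws-sdk/client-s3'

const s3 = new S3({ region: 'us-east-1' })

await s3.send(
  new PutBucketCorsCommand({
    Bucket: 'my-bucket',
    CORSConfiguration: {
      CORSRules: [
        {
          AllowedHeaders: ['*'],
          AllowedMethods: ['PUT'],
          AllowedOrigins: ['https://example.com'],
          MaxAgeSeconds: 3600,
        },
      ],
    },
  }),
)
```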

View File

@@ -1,5 +1,6 @@
import type {
Adapter,
ClientUploadsConfig,
PluginOptions as CloudStoragePluginOptions,
CollectionOptions,
GeneratedAdapter,
@@ -8,7 +9,9 @@ import type { Config, Plugin, UploadCollectionSlug } from 'payload'
import * as AWS from '@aws-sdk/client-s3'
import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities'
import { getGenerateSignedURLHandler } from './generateSignedURL.js'
import { getGenerateURL } from './generateURL.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
@@ -28,10 +31,15 @@ export type S3StorageOptions = {
bucket: string
/**
* Do uploads directly on the client to bypass limits on Vercel. You must allow the CORS PUT method on the bucket for your website's origin.
*/
clientUploads?: ClientUploadsConfig
/**
* Collection options to apply the S3 adapter to.
*/
collections: Partial<Record<UploadCollectionSlug, Omit<CollectionOptions, 'adapter'> | true>>
/**
* AWS S3 client configuration. Highly dependent on your AWS setup.
*
@@ -63,7 +71,35 @@ export const s3Storage: S3StoragePlugin =
return incomingConfig
}
const adapter = s3StorageInternal(s3StorageOptions)
let storageClient: AWS.S3 | null = null
const getStorageClient: () => AWS.S3 = () => {
if (storageClient) {
return storageClient
}
storageClient = new AWS.S3(s3StorageOptions.config ?? {})
return storageClient
}
initClientUploads({
clientHandler: '@payloadcms/storage-s3/client#S3ClientUploadHandler',
collections: s3StorageOptions.collections,
config: incomingConfig,
enabled: !!s3StorageOptions.clientUploads,
serverHandler: getGenerateSignedURLHandler({
access:
typeof s3StorageOptions.clientUploads === 'object'
? s3StorageOptions.clientUploads.access
: undefined,
acl: s3StorageOptions.acl,
bucket: s3StorageOptions.bucket,
collections: s3StorageOptions.collections,
getStorageClient,
}),
serverHandlerPath: '/storage-s3-generate-signed-url',
})
const adapter = s3StorageInternal(getStorageClient, s3StorageOptions)
// Add adapter to each collection option object
const collectionsWithAdapter: CloudStoragePluginOptions['collections'] = Object.entries(
@@ -102,17 +138,11 @@ export const s3Storage: S3StoragePlugin =
})(config)
}
function s3StorageInternal({ acl, bucket, config = {} }: S3StorageOptions): Adapter {
function s3StorageInternal(
getStorageClient: () => AWS.S3,
{ acl, bucket, config = {} }: S3StorageOptions,
): Adapter {
return ({ collection, prefix }): GeneratedAdapter => {
let storageClient: AWS.S3 | null = null
const getStorageClient: () => AWS.S3 = () => {
if (storageClient) {
return storageClient
}
storageClient = new AWS.S3(config)
return storageClient
}
return {
name: 's3',
generateURL: getGenerateURL({ bucket, config }),

View File

@@ -13,6 +13,7 @@ pnpm add @payloadcms/storage-uploadthing
- Configure the `collections` object to specify which collections should use uploadthing. The slug _must_ match one of your existing collection slugs and be an `upload` type.
- Get an API key from Uploadthing and set it as `apiKey` in the `options` object.
- `acl` is optional and defaults to `public-read`.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client.
```ts
export default buildConfig({

View File

@@ -23,6 +23,11 @@
"import": "./src/index.ts",
"types": "./src/index.ts",
"default": "./src/index.ts"
},
"./client": {
"import": "./src/exports/client.ts",
"types": "./src/exports/client.ts",
"default": "./src/exports/client.ts"
}
},
"main": "./src/index.ts",
@@ -59,6 +64,11 @@
"import": "./dist/index.js",
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./client": {
"import": "./dist/exports/client.js",
"types": "./dist/exports/client.d.ts",
"default": "./dist/exports/client.js"
}
},
"main": "./dist/index.js",

View File

@@ -0,0 +1,18 @@
'use client'
import { createClientUploadHandler } from '@payloadcms/plugin-cloud-storage/client'
import { genUploader } from 'uploadthing/client'
export const UploadthingClientUploadHandler = createClientUploadHandler({
handler: async ({ apiRoute, collectionSlug, file, serverHandlerPath, serverURL }) => {
const { uploadFiles } = genUploader({
package: 'storage-uploadthing',
url: `${serverURL}${apiRoute}${serverHandlerPath}?collectionSlug=${collectionSlug}`,
})
const res = await uploadFiles('uploader', {
files: [file],
})
return { key: res[0].key }
},
})

View File

@@ -0,0 +1 @@
export { UploadthingClientUploadHandler } from '../client/UploadthingClientUploadHandler.js'

View File

@@ -0,0 +1,62 @@
import {
APIError,
Forbidden,
type PayloadHandler,
type PayloadRequest,
type UploadCollectionSlug,
} from 'payload'
type Args = {
access?: (args: {
collectionSlug: UploadCollectionSlug
req: PayloadRequest
}) => boolean | Promise<boolean>
acl: 'private' | 'public-read'
token?: string
}
const defaultAccess: Args['access'] = ({ req }) => !!req.user
import type { FileRouter } from 'uploadthing/server'
import { createRouteHandler } from 'uploadthing/next'
import { createUploadthing } from 'uploadthing/server'
export const getClientUploadRoute = ({
access = defaultAccess,
acl,
token,
}: Args): PayloadHandler => {
const f = createUploadthing()
const uploadRouter = {
uploader: f({
blob: {
acl,
maxFileCount: 1,
},
})
.middleware(async ({ req: rawReq }) => {
const req = rawReq as PayloadRequest
const collectionSlug = req.searchParams.get('collectionSlug')
if (!collectionSlug) {
throw new APIError('No payload was provided')
}
if (!(await access({ collectionSlug, req }))) {
throw new Forbidden()
}
return {}
})
.onUploadComplete(() => {}),
} satisfies FileRouter
const { POST } = createRouteHandler({ config: { token }, router: uploadRouter })
return async (req) => {
return POST(req)
}
}

View File

@@ -1,5 +1,6 @@
import type {
Adapter,
ClientUploadsConfig,
PluginOptions as CloudStoragePluginOptions,
CollectionOptions,
GeneratedAdapter,
@@ -8,14 +9,22 @@ import type { Config, Field, Plugin, UploadCollectionSlug } from 'payload'
import type { UTApiOptions } from 'uploadthing/types'
import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { UTApi } from 'uploadthing/server'
import { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities'
import { createRouteHandler } from 'uploadthing/next'
import { createUploadthing, UTApi } from 'uploadthing/server'
import { generateURL } from './generateURL.js'
import { getClientUploadRoute } from './getClientUploadRoute.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
import { getHandler } from './staticHandler.js'
export type UploadthingStorageOptions = {
/**
* Do uploads directly on the client to bypass limits on Vercel.
*/
clientUploads?: ClientUploadsConfig
/**
* Collection options to apply the adapter to.
*/
@@ -58,6 +67,22 @@ export const uploadthingStorage: UploadthingPlugin =
const adapter = uploadthingInternal(uploadthingStorageOptions)
initClientUploads({
clientHandler: '@payloadcms/storage-uploadthing/client#UploadthingClientUploadHandler',
collections: uploadthingStorageOptions.collections,
config: incomingConfig,
enabled: !!uploadthingStorageOptions.clientUploads,
serverHandler: getClientUploadRoute({
access:
typeof uploadthingStorageOptions.clientUploads === 'object'
? uploadthingStorageOptions.clientUploads.access
: undefined,
acl: uploadthingStorageOptions.options.acl || 'public-read',
token: uploadthingStorageOptions.options.token,
}),
serverHandlerPath: '/storage-uploadthing-client-upload-route',
})
// Add adapter to each collection option object
const collectionsWithAdapter: CloudStoragePluginOptions['collections'] = Object.entries(
uploadthingStorageOptions.collections,

View File

@@ -9,8 +9,18 @@ type Args = {
}
export const getHandler = ({ utApi }: Args): StaticHandler => {
return async (req, { doc, params: { collection, filename } }) => {
return async (req, { doc, params: { clientUploadContext, collection, filename } }) => {
try {
let key: string
if (
clientUploadContext &&
typeof clientUploadContext === 'object' &&
'key' in clientUploadContext &&
typeof clientUploadContext.key === 'string'
) {
key = clientUploadContext.key
} else {
const collectionConfig = req.payload.collections[collection]?.config
let retrievedDoc = doc
@@ -48,7 +58,8 @@ export const getHandler = ({ utApi }: Args): StaticHandler => {
return new Response(null, { status: 404, statusText: 'Not Found' })
}
const key = getKeyFromFilename(retrievedDoc, filename)
key = getKeyFromFilename(retrievedDoc, filename)
}
if (!key) {
return new Response(null, { status: 404, statusText: 'Not Found' })
@@ -69,7 +80,7 @@ export const getHandler = ({ utApi }: Args): StaticHandler => {
const blob = await response.blob()
const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match')
const objectEtag = response.headers.get('etag') as string
const objectEtag = response.headers.get('etag')
if (etagFromHeaders && etagFromHeaders === objectEtag) {
return new Response(null, {

View File

@@ -15,6 +15,7 @@ pnpm add @payloadcms/storage-vercel-blob
- Configure the `collections` object to specify which collections should use the Vercel Blob adapter. The slug _must_ match one of your existing collection slugs.
- Ensure you have `BLOB_READ_WRITE_TOKEN` set in your Vercel environment variables. This is usually set by Vercel automatically after adding blob storage to your project.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client.
```ts
import { vercelBlobStorage } from '@payloadcms/storage-vercel-blob'
@@ -47,3 +48,4 @@ export default buildConfig({
| `addRandomSuffix` | Add a random suffix to the uploaded file name in Vercel Blob storage | `false` |
| `cacheControlMaxAge` | Cache-Control max-age in seconds | `365 * 24 * 60 * 60` (1 Year) |
| `token` | Vercel Blob storage read/write token | `''` |
| `clientUploads` | Do uploads directly on the client to bypass limits on Vercel | |

View File

@@ -23,6 +23,11 @@
"import": "./src/index.ts",
"types": "./src/index.ts",
"default": "./src/index.ts"
},
"./client": {
"import": "./src/exports/client.ts",
"types": "./src/exports/client.ts",
"default": "./src/exports/client.ts"
}
},
"main": "./src/index.ts",
@@ -59,6 +64,11 @@
"import": "./dist/index.js",
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./client": {
"import": "./dist/exports/client.js",
"types": "./dist/exports/client.d.ts",
"default": "./dist/exports/client.js"
}
},
"main": "./dist/index.js",

View File

@@ -0,0 +1,34 @@
'use client'
import { createClientUploadHandler } from '@payloadcms/plugin-cloud-storage/client'
import { upload } from '@vercel/blob/client'
export type VercelBlobClientUploadHandlerExtra = {
addRandomSuffix: boolean
baseURL: string
prefix: string
}
export const VercelBlobClientUploadHandler =
createClientUploadHandler<VercelBlobClientUploadHandlerExtra>({
handler: async ({
apiRoute,
collectionSlug,
extra: { addRandomSuffix, baseURL, prefix = '' },
file,
serverHandlerPath,
serverURL,
updateFilename,
}) => {
const result = await upload(`${prefix}${file.name}`, file, {
access: 'public',
clientPayload: collectionSlug,
contentType: file.type,
handleUploadUrl: `${serverURL}${apiRoute}${serverHandlerPath}`,
})
// Update filename with suffix from returned url
if (addRandomSuffix) {
updateFilename(result.url.replace(`${baseURL}/`, ''))
}
},
})

View File

@@ -0,0 +1 @@
export { VercelBlobClientUploadHandler } from '../client/VercelBlobClientUploadHandler.js'

View File

@@ -0,0 +1,50 @@
import type { PayloadHandler, PayloadRequest, UploadCollectionSlug } from 'payload'
import { handleUpload, type HandleUploadBody } from '@vercel/blob/client'
import { APIError, Forbidden } from 'payload'
type Args = {
access?: (args: {
collectionSlug: UploadCollectionSlug
req: PayloadRequest
}) => boolean | Promise<boolean>
addRandomSuffix?: boolean
cacheControlMaxAge?: number
token: string
}
const defaultAccess: Args['access'] = ({ req }) => !!req.user
export const getClientUploadRoute =
({ access = defaultAccess, addRandomSuffix, cacheControlMaxAge, token }: Args): PayloadHandler =>
async (req) => {
const body = (await req.json!()) as HandleUploadBody
try {
const jsonResponse = await handleUpload({
body,
onBeforeGenerateToken: async (_pathname: string, collectionSlug: null | string) => {
if (!collectionSlug) {
throw new APIError('No payload was provided')
}
if (!(await access({ collectionSlug, req }))) {
throw new Forbidden()
}
return Promise.resolve({
addRandomSuffix,
cacheControlMaxAge,
})
},
onUploadCompleted: async () => {},
request: req as Request,
token,
})
return Response.json(jsonResponse)
} catch (error) {
req.payload.logger.error(error)
throw new APIError('storage-vercel-blob client upload route error')
}
}

View File

@@ -1,5 +1,6 @@
import type {
Adapter,
ClientUploadsConfig,
PluginOptions as CloudStoragePluginOptions,
CollectionOptions,
GeneratedAdapter,
@@ -7,8 +8,12 @@ import type {
import type { Config, Plugin, UploadCollectionSlug } from 'payload'
import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities'
import type { VercelBlobClientUploadHandlerExtra } from './client/VercelBlobClientUploadHandler.js'
import { getGenerateUrl } from './generateURL.js'
import { getClientUploadRoute } from './getClientUploadRoute.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
import { getStaticHandler } from './staticHandler.js'
@@ -32,10 +37,15 @@ export type VercelBlobStorageOptions = {
/**
* Cache-Control max-age in seconds
*
* @defaultvalue 365 * 24 * 60 * 60 (1 Year)
* @default 365 * 24 * 60 * 60 // (1 Year)
*/
cacheControlMaxAge?: number
/**
* Do uploads directly on the client to bypass limits on Vercel.
*/
clientUploads?: ClientUploadsConfig
/**
* Collections to apply the Vercel Blob adapter to
*/
@@ -91,6 +101,29 @@ export const vercelBlobStorage: VercelBlobStoragePlugin =
const baseUrl = `https://${storeId}.${optionsWithDefaults.access}.blob.vercel-storage.com`
initClientUploads<
VercelBlobClientUploadHandlerExtra,
VercelBlobStorageOptions['collections'][string]
>({
clientHandler: '@payloadcms/storage-vercel-blob/client#VercelBlobClientUploadHandler',
collections: options.collections,
config: incomingConfig,
enabled: !!options.clientUploads,
extraClientHandlerProps: (collection) => ({
addRandomSuffix: !!optionsWithDefaults.addRandomSuffix,
baseURL: baseUrl,
prefix: (typeof collection === 'object' && collection.prefix) || '',
}),
serverHandler: getClientUploadRoute({
access:
typeof options.clientUploads === 'object' ? options.clientUploads.access : undefined,
addRandomSuffix: optionsWithDefaults.addRandomSuffix,
cacheControlMaxAge: options.cacheControlMaxAge,
token: options.token,
}),
serverHandlerPath: '/vercel-blob-client-upload-route',
})
const adapter = vercelBlobStorageInternal({ ...optionsWithDefaults, baseUrl })
// Add adapter to each collection option object

View File

@@ -22,7 +22,6 @@ export const getStaticHandler = (
const fileUrl = `${baseUrl}/${fileKey}`
const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match')
const blobMetadata = await head(fileUrl, { token })
const uploadedAtString = blobMetadata.uploadedAt.toISOString()
const ETag = `"${fileKey}-${uploadedAtString}"`

View File

@@ -1,16 +1,43 @@
import type { FormState } from 'payload'
import type { CollectionSlug, FormState } from 'payload'
import { serialize } from 'object-to-formdata'
import { reduceFieldsToValues } from 'payload/shared'
export function createFormData(formState: FormState = {}, overrides: Record<string, any> = {}) {
import type { UploadHandlersContext } from '../../../providers/UploadHandlers/index.js'
export async function createFormData(
formState: FormState = {},
overrides: Record<string, any> = {},
collectionSlug: CollectionSlug,
uploadHandler: ReturnType<UploadHandlersContext['getUploadHandler']>,
) {
const data = reduceFieldsToValues(formState, true)
const file = data?.file
let file = data?.file
if (file) {
delete data.file
}
let clientUploadContext = null
if (typeof uploadHandler === 'function') {
let filename = file.name
clientUploadContext = await uploadHandler({
file,
updateFilename: (value) => {
filename = value
},
})
file = JSON.stringify({
clientUploadContext,
collectionSlug,
filename,
mimeType: file.type,
size: file.size,
})
}
const dataWithOverrides = {
...data,
...overrides,

View File

@@ -15,6 +15,7 @@ import { useConfig } from '../../../providers/Config/index.js'
import { useLocale } from '../../../providers/Locale/index.js'
import { useServerFunctions } from '../../../providers/ServerFunctions/index.js'
import { useTranslation } from '../../../providers/Translation/index.js'
import { useUploadHandlers } from '../../../providers/UploadHandlers/index.js'
import { hasSavePermission as getHasSavePermission } from '../../../utilities/hasSavePermission.js'
import { LoadingOverlay } from '../../Loading/index.js'
import { useLoadingOverlay } from '../../LoadingOverlay/index.js'
@@ -94,6 +95,7 @@ export function FormsManagerProvider({ children }: FormsManagerProps) {
const { i18n, t } = useTranslation()
const { getDocumentSlots, getFormState } = useServerFunctions()
const { getUploadHandler } = useUploadHandlers()
const [documentSlots, setDocumentSlots] = React.useState<DocumentSlots>({})
const [hasSubmitted, setHasSubmitted] = React.useState(false)
@@ -296,7 +298,12 @@ export function FormsManagerProvider({ children }: FormsManagerProps) {
setLoadingText(t('general:uploadingBulk', { current: i + 1, total: currentForms.length }))
const req = await fetch(actionURL, {
body: createFormData(form.formState, overrides),
body: await createFormData(
form.formState,
overrides,
collectionSlug,
getUploadHandler({ collectionSlug }),
),
method: 'POST',
})
@@ -387,7 +394,17 @@ export function FormsManagerProvider({ children }: FormsManagerProps) {
},
})
},
[actionURL, activeIndex, forms, onSuccess, t, closeModal, drawerSlug],
[
actionURL,
activeIndex,
forms,
onSuccess,
collectionSlug,
getUploadHandler,
t,
closeModal,
drawerSlug,
],
)
const bulkUpdateForm = React.useCallback(

View File

@@ -290,6 +290,8 @@ export {
export { ScrollInfoProvider, useScrollInfo } from '../../providers/ScrollInfo/index.js'
export { SearchParamsProvider, useSearchParams } from '../../providers/SearchParams/index.js'
export { SelectionProvider, useSelection } from '../../providers/Selection/index.js'
export { UploadHandlersProvider, useUploadHandlers } from '../../providers/UploadHandlers/index.js'
export type { UploadHandlersContext } from '../../providers/UploadHandlers/index.js'
export { defaultTheme, type Theme, ThemeProvider, useTheme } from '../../providers/Theme/index.js'
export { TranslationProvider, useTranslation } from '../../providers/Translation/index.js'
export { useWindowInfo, WindowInfoProvider } from '../../providers/WindowInfo/index.js'

View File

@@ -32,6 +32,7 @@ import { useOperation } from '../../providers/Operation/index.js'
import { useRouteTransition } from '../../providers/RouteTransition/index.js'
import { useServerFunctions } from '../../providers/ServerFunctions/index.js'
import { useTranslation } from '../../providers/Translation/index.js'
import { useUploadHandlers } from '../../providers/UploadHandlers/index.js'
import { abortAndIgnore, handleAbortRef } from '../../utilities/abortAndIgnore.js'
import { requests } from '../../utilities/api.js'
import {
@@ -90,6 +91,7 @@ export const Form: React.FC<FormProps> = (props) => {
const { getFormState } = useServerFunctions()
const { startRouteTransition } = useRouteTransition()
const { getUploadHandler } = useUploadHandlers()
const { config } = useConfig()
@@ -319,7 +321,7 @@ export const Form: React.FC<FormProps> = (props) => {
return
}
const formData = contextRef.current.createFormData(overrides, {
const formData = await contextRef.current.createFormData(overrides, {
mergeOverrideData: Boolean(typeof overridesFromArgs !== 'function'),
})
@@ -480,10 +482,11 @@ export const Form: React.FC<FormProps> = (props) => {
[],
)
const createFormData = useCallback<CreateFormData>((overrides, { mergeOverrideData = true }) => {
const createFormData = useCallback<CreateFormData>(
async (overrides, { mergeOverrideData = true }) => {
let data = reduceFieldsToValues(contextRef.current.fields, true)
const file = data?.file
let file = data?.file
if (file) {
delete data.file
@@ -498,6 +501,27 @@ export const Form: React.FC<FormProps> = (props) => {
data = overrides
}
const handler = getUploadHandler({ collectionSlug })
if (typeof handler === 'function') {
let clientUploadContext = null
let filename = file.name
clientUploadContext = await handler({
file,
updateFilename: (value) => {
filename = value
},
})
file = JSON.stringify({
clientUploadContext,
collectionSlug,
filename,
mimeType: file.type,
size: file.size,
})
}
const dataToSerialize = {
_payload: JSON.stringify(data),
file,
@@ -507,7 +531,9 @@ export const Form: React.FC<FormProps> = (props) => {
const formData = serialize(dataToSerialize, { indices: true, nullsAsUndefineds: false })
return formData
}, [])
},
[collectionSlug, getUploadHandler],
)
const reset = useCallback(
async (data: unknown) => {

View File

@@ -84,7 +84,7 @@ export type CreateFormData = (
* @default true
*/
options?: { mergeOverrideData?: boolean },
) => FormData
) => FormData | Promise<FormData>
export type GetFields = () => FormState
export type GetField = (path: string) => FormField
export type GetData = () => Data

View File

@@ -34,6 +34,7 @@ import { ServerFunctionsProvider } from '../ServerFunctions/index.js'
import { ThemeProvider } from '../Theme/index.js'
import { ToastContainer } from '../ToastContainer/index.js'
import { TranslationProvider } from '../Translation/index.js'
import { UploadHandlersProvider } from '../UploadHandlers/index.js'
type Props = {
readonly children: React.ReactNode
@@ -106,7 +107,9 @@ export const RootProvider: React.FC<Props> = ({
<LoadingOverlayProvider>
<DocumentEventsProvider>
<NavProvider initialIsOpen={isNavOpen}>
<UploadHandlersProvider>
{children}
</UploadHandlersProvider>
</NavProvider>
</DocumentEventsProvider>
</LoadingOverlayProvider>

View File

@@ -0,0 +1,54 @@
'use client'
import type { UploadCollectionSlug } from 'payload'
import React, { useState } from 'react'
type UploadHandler = (args: {
file: File
updateFilename: (filename: string) => void
}) => Promise<unknown>
export type UploadHandlersContext = {
getUploadHandler: (args: { collectionSlug: UploadCollectionSlug }) => null | UploadHandler
setUploadHandler: (args: {
collectionSlug: UploadCollectionSlug
handler: UploadHandler
}) => unknown
}
const Context = React.createContext<null | UploadHandlersContext>(null)
export const UploadHandlersProvider = ({ children }) => {
const [uploadHandlers, setUploadHandlers] = useState<Map<UploadCollectionSlug, UploadHandler>>(
() => new Map(),
)
const getUploadHandler: UploadHandlersContext['getUploadHandler'] = ({ collectionSlug }) => {
return uploadHandlers.get(collectionSlug)
}
const setUploadHandler: UploadHandlersContext['setUploadHandler'] = ({
collectionSlug,
handler,
}) => {
setUploadHandlers((uploadHandlers) => {
const clone = new Map(uploadHandlers)
clone.set(collectionSlug, handler)
return clone
})
}
return (
<Context.Provider value={{ getUploadHandler, setUploadHandler }}>{children}</Context.Provider>
)
}
export const useUploadHandlers = (): UploadHandlersContext => {
const context = React.useContext(Context)
if (context === null) {
throw new Error('useUploadHandlers must be used within UploadHandlersProvider')
}
return context
}
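
For custom setups outside the bundled adapters, a minimal sketch of registering
a handler through this provider (the `media` slug and the returned context
shape are assumptions for illustration):

```tsx
'use client'
// Sketch: register a client-side upload handler for a hypothetical `media`
// collection. The handler uploads the file itself and returns a context object
// that the adapter's staticHandler later receives as `clientUploadContext`.
import { useUploadHandlers } from '@payloadcms/ui'
import React, { useEffect } from 'react'

export function MyUploadHandlerProvider({ children }: { children: React.ReactNode }) {
  const { setUploadHandler } = useUploadHandlers()

  useEffect(() => {
    setUploadHandler({
      collectionSlug: 'media',
      handler: async ({ file, updateFilename }) => {
        // Upload `file` to your storage here (e.g. via a signed URL),
        // optionally rename it, and return any context your adapter needs.
        updateFilename(file.name)
        return { key: file.name }
      },
    })
    // Register once on mount, mirroring the bundled adapter handlers.
  }, [])

  return <>{children}</>
}
```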

pnpm-lock.yaml (generated, 430 changes): file diff suppressed because it is too large.

View File

@@ -36,6 +36,7 @@ export default buildConfigWithDefaults({
},
plugins: [
uploadthingStorage({
clientUploads: true,
collections: {
[mediaSlug]: true,
},

View File

@@ -66,7 +66,15 @@
"./packages/plugin-multi-tenant/src/exports/client.ts"
],
"@payloadcms/plugin-multi-tenant": ["./packages/plugin-multi-tenant/src/index.ts"],
"@payloadcms/next": ["./packages/next/src/exports/*"]
"@payloadcms/next": ["./packages/next/src/exports/*"],
"@payloadcms/storage-s3/client": ["./packages/storage-s3/src/exports/client.ts"],
"@payloadcms/storage-vercel-blob/client": [
"./packages/storage-vercel-blob/src/exports/client.ts"
],
"@payloadcms/storage-gcs/client": ["./packages/storage-gcs/src/exports/client.ts"],
"@payloadcms/storage-uploadthing/client": [
"./packages/storage-uploadthing/src/exports/client.ts"
]
}
},
"include": ["${configDir}/src"],

View File

@@ -40,6 +40,9 @@
{
"path": "./packages/plugin-cloud-storage"
},
{
"path": "./packages/storage-s3"
},
{
"path": "./packages/payload-cloud"
},