fix(plugin-cloud-storage): actually deprecate adapters (#9640)
In v2, plugin-cloud-storage exported several adapters that were marked as deprecated. They were replaced by standalone `@payloadcms/storage-*` packages; more detail is available in [this section of the migration guide](https://github.com/payloadcms/payload/blob/main/docs/migration-guide/overview.mdx#payloadcmsplugin-cloud-storage). Unfortunately, these exports were not removed before 3.0 was released.
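For reference, a minimal before/after sketch of the migration this change enforces, using S3 as the example. The option shapes mirror the test config updated in this diff; the collection slug and environment variable names are illustrative only:

```ts
// Before (v2, deprecated): wire an adapter into cloudStoragePlugin
import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { s3Adapter } from '@payloadcms/plugin-cloud-storage/s3'

const before = cloudStoragePlugin({
  collections: {
    media: {
      adapter: s3Adapter({
        bucket: process.env.S3_BUCKET,
        config: { region: process.env.S3_REGION },
      }),
    },
  },
})

// After (v3): use the standalone storage package directly as a plugin
import { s3Storage } from '@payloadcms/storage-s3'

const after = s3Storage({
  collections: {
    media: true, // or { prefix: '...' } per collection
  },
  bucket: process.env.S3_BUCKET,
  config: { region: process.env.S3_REGION },
})
```

The same pattern applies to the Azure, GCS, and Vercel Blob adapters via `@payloadcms/storage-azure`, `@payloadcms/storage-gcs`, and `@payloadcms/storage-vercel-blob`.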
@@ -64,8 +64,8 @@
    "dev:vercel-postgres": "cross-env PAYLOAD_DATABASE=vercel-postgres pnpm runts ./test/dev.ts",
    "devsafe": "node ./scripts/delete-recursively.js '**/.next' && pnpm dev",
    "docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
    "docker:start": "docker compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",
    "docker:stop": "docker compose -f packages/plugin-cloud-storage/docker-compose.yml down",
    "docker:start": "docker compose -f test/docker-compose.yml up -d",
    "docker:stop": "docker compose -f test/docker-compose.yml down",
    "force:build": "pnpm run build:core:force",
    "lint": "turbo run lint --concurrency 1 --continue",
    "lint-staged": "lint-staged",

@@ -33,26 +33,6 @@
      "import": "./src/exports/utilities.ts",
      "types": "./src/exports/utilities.ts",
      "default": "./src/exports/utilities.ts"
    },
    "./azure": {
      "import": "./src/exports/azure.ts",
      "types": "./src/exports/azure.ts",
      "default": "./src/exports/azure.ts"
    },
    "./gcs": {
      "import": "./src/exports/gcs.ts",
      "types": "./src/exports/gcs.ts",
      "default": "./src/exports/gcs.ts"
    },
    "./s3": {
      "import": "./src/exports/s3.ts",
      "types": "./src/exports/s3.ts",
      "default": "./src/exports/s3.ts"
    },
    "./vercelBlob": {
      "import": "./src/exports/vercelBlob.ts",
      "types": "./src/exports/vercelBlob.ts",
      "default": "./src/exports/vercelBlob.ts"
    }
  },
  "main": "./src/index.ts",
@@ -77,43 +57,12 @@
    "range-parser": "^1.2.1"
  },
  "devDependencies": {
    "@aws-sdk/client-s3": "^3.614.0",
    "@aws-sdk/lib-storage": "^3.614.0",
    "@azure/storage-blob": "^12.11.0",
    "@google-cloud/storage": "^7.7.0",
    "@types/find-node-modules": "^2.1.2",
    "@vercel/blob": "^0.22.3",
    "payload": "workspace:*"
  },
  "peerDependencies": {
    "@aws-sdk/client-s3": "^3.614.0",
    "@aws-sdk/lib-storage": "^3.614.0",
    "@azure/abort-controller": "^1.0.0",
    "@azure/storage-blob": "^12.11.0",
    "@google-cloud/storage": "^7.7.0",
    "@vercel/blob": "^0.22.3",
    "payload": "workspace:*"
  },
  "peerDependenciesMeta": {
    "@aws-sdk/client-s3": {
      "optional": true
    },
    "@aws-sdk/lib-storage": {
      "optional": true
    },
    "@azure/abort-controller": {
      "optional": true
    },
    "@azure/storage-blob": {
      "optional": true
    },
    "@google-cloud/storage": {
      "optional": true
    },
    "@vercel/blob": {
      "optional": true
    }
  },
  "publishConfig": {
    "exports": {
      ".": {
@@ -130,26 +79,6 @@
        "import": "./dist/exports/utilities.js",
        "types": "./dist/exports/utilities.d.ts",
        "default": "./dist/exports/utilities.js"
      },
      "./azure": {
        "import": "./dist/exports/azure.js",
        "types": "./dist/exports/azure.d.ts",
        "default": "./dist/exports/azure.js"
      },
      "./gcs": {
        "import": "./dist/exports/gcs.js",
        "types": "./dist/exports/gcs.d.ts",
        "default": "./dist/exports/gcs.js"
      },
      "./s3": {
        "import": "./dist/exports/s3.js",
        "types": "./dist/exports/s3.d.ts",
        "default": "./dist/exports/s3.js"
      },
      "./vercelBlob": {
        "import": "./dist/exports/vercelBlob.js",
        "types": "./dist/exports/vercelBlob.d.ts",
        "default": "./dist/exports/vercelBlob.js"
      }
    },
    "main": "./dist/index.js",

@@ -1,16 +0,0 @@
version: '3'

services:
  azure-storage:
    image: mcr.microsoft.com/azure-storage/azurite:3.18.0
    restart: always
    command: 'azurite --loose --blobHost 0.0.0.0 --tableHost 0.0.0.0 --queueHost 0.0.0.0'
    ports:
      - '10000:10000'
      - '10001:10001'
      - '10002:10002'
    volumes:
      - ./azurestoragedata:/data"

volumes:
  azurestoragedata:
@@ -1,14 +0,0 @@
import path from 'path'

import type { GenerateURL } from '../../types.js'

interface Args {
  baseURL: string
  containerName: string
}

export const getGenerateURL =
  ({ baseURL, containerName }: Args): GenerateURL =>
  ({ filename, prefix = '' }) => {
    return `${baseURL}/${containerName}/${path.posix.join(prefix, filename)}`
  }
@@ -1,18 +0,0 @@
import type { ContainerClient } from '@azure/storage-blob'
import type { CollectionConfig } from 'payload'

import path from 'path'

import type { HandleDelete } from '../../types.js'

interface Args {
  collection: CollectionConfig
  getStorageClient: () => ContainerClient
}

export const getHandleDelete = ({ getStorageClient }: Args): HandleDelete => {
  return async ({ doc: { prefix = '' }, filename }) => {
    const blockBlobClient = getStorageClient().getBlockBlobClient(path.posix.join(prefix, filename))
    await blockBlobClient.deleteIfExists()
  }
}
@@ -1,43 +0,0 @@
import type { ContainerClient } from '@azure/storage-blob'
import type { CollectionConfig } from 'payload'

import { AbortController } from '@azure/abort-controller'
import fs from 'fs'
import path from 'path'
import { Readable } from 'stream'

import type { HandleUpload } from '../../types.js'

interface Args {
  collection: CollectionConfig
  getStorageClient: () => ContainerClient
  prefix?: string
}

const multipartThreshold = 1024 * 1024 * 50 // 50MB
export const getHandleUpload = ({ getStorageClient, prefix = '' }: Args): HandleUpload => {
  return async ({ data, file }) => {
    const fileKey = path.posix.join(data.prefix || prefix, file.filename)

    const blockBlobClient = getStorageClient().getBlockBlobClient(fileKey)

    // when there are no temp files, or the upload is less than the threshold size, do not stream files
    if (!file.tempFilePath && file.buffer.length > 0 && file.buffer.length < multipartThreshold) {
      await blockBlobClient.upload(file.buffer, file.buffer.byteLength, {
        blobHTTPHeaders: { blobContentType: file.mimeType },
      })

      return data
    }

    const fileBufferOrStream: Readable = file.tempFilePath
      ? fs.createReadStream(file.tempFilePath)
      : Readable.from(file.buffer)

    await blockBlobClient.uploadStream(fileBufferOrStream, 4 * 1024 * 1024, 4, {
      abortSignal: AbortController.timeout(30 * 60 * 1000),
    })

    return data
  }
}
@@ -1,74 +0,0 @@
import type { ContainerClient } from '@azure/storage-blob'

import { BlobServiceClient } from '@azure/storage-blob'

import type { Adapter, GeneratedAdapter } from '../../types.js'

import { getGenerateURL } from './generateURL.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
import { getHandler } from './staticHandler.js'

export interface Args {
  allowContainerCreate: boolean
  baseURL: string
  connectionString: string
  containerName: string
}

/**
 * @deprecated Use [`@payloadcms/azure`](https://www.npmjs.com/package/@payloadcms/azure) instead.
 *
 * This adapter has been superceded by `@payloadcms/azure` and will be removed in Payload 3.0.
 */
export const azureBlobStorageAdapter = ({
  allowContainerCreate,
  baseURL,
  connectionString,
  containerName,
}: Args): Adapter => {
  if (!BlobServiceClient) {
    throw new Error(
      'The package @azure/storage-blob is not installed, but is required for the plugin-cloud-storage Azure adapter. Please install it.',
    )
  }

  let storageClient: ContainerClient | null = null
  const getStorageClient = () => {
    if (storageClient) {
      return storageClient
    }
    let blobServiceClient = null
    try {
      blobServiceClient = BlobServiceClient.fromConnectionString(connectionString)
    } catch (error) {
      if (/is not a constructor$/.test(error.message)) {
        throw new Error(
          'The package @azure/storage-blob is not installed, but is required for the plugin-cloud-storage Azure adapter. Please install it.',
        )
      }
      // Re-throw other unexpected errors.
      throw error
    }
    return (storageClient = blobServiceClient.getContainerClient(containerName))
  }

  const createContainerIfNotExists = () => {
    getStorageClient().createIfNotExists({ access: 'blob' })
  }

  return ({ collection, prefix }): GeneratedAdapter => {
    return {
      name: 'azure',
      generateURL: getGenerateURL({ baseURL, containerName }),
      handleDelete: getHandleDelete({ collection, getStorageClient }),
      handleUpload: getHandleUpload({
        collection,
        getStorageClient,
        prefix,
      }),
      staticHandler: getHandler({ collection, getStorageClient }),
      ...(allowContainerCreate && { onInit: createContainerIfNotExists }),
    }
  }
}
@@ -1,55 +0,0 @@
import type { ContainerClient } from '@azure/storage-blob'
import type { CollectionConfig } from 'payload'

import path from 'path'

import type { StaticHandler } from '../../types.js'

import { getFilePrefix } from '../../utilities/getFilePrefix.js'
import getRangeFromHeader from '../../utilities/getRangeFromHeader.js'

interface Args {
  collection: CollectionConfig
  getStorageClient: () => ContainerClient
}

export const getHandler = ({ collection, getStorageClient }: Args): StaticHandler => {
  return async (req, { params: { filename } }) => {
    try {
      const prefix = await getFilePrefix({ collection, filename, req })
      const blockBlobClient = getStorageClient().getBlockBlobClient(
        path.posix.join(prefix, filename),
      )

      const { end, start } = await getRangeFromHeader(blockBlobClient, req.headers.get('range'))

      const blob = await blockBlobClient.download(start, end)

      const response = blob._response

      // Manually create a ReadableStream for the web from a Node.js stream.
      const readableStream = new ReadableStream({
        start(controller) {
          const nodeStream = blob.readableStreamBody
          nodeStream.on('data', (chunk) => {
            controller.enqueue(new Uint8Array(chunk))
          })
          nodeStream.on('end', () => {
            controller.close()
          })
          nodeStream.on('error', (err) => {
            controller.error(err)
          })
        },
      })

      return new Response(readableStream, {
        headers: response.headers.rawHeaders(),
        status: response.status,
      })
    } catch (err: unknown) {
      req.payload.logger.error(err)
      return new Response('Internal Server Error', { status: 500 })
    }
  }
}
@@ -1,26 +0,0 @@
version: '3'

services:
  google-cloud-storage:
    image: fsouza/fake-gcs-server
    restart: always
    command:
      [
        '-scheme',
        'http',
        '-port',
        '4443',
        '-public-host',
        'http://localhost:4443',
        '-external-url',
        'http://localhost:4443',
        '-backend',
        'memory',
      ]
    ports:
      - '4443:4443'
    volumes:
      - ./google-cloud-storage/payload-bucket:/data/payload-bucket

volumes:
  google-cloud-storage:
@@ -1,18 +0,0 @@
import type { Storage } from '@google-cloud/storage'

import path from 'path'

import type { GenerateURL } from '../../types.js'

interface Args {
  bucket: string
  getStorageClient: () => Storage
}

export const getGenerateURL =
  ({ bucket, getStorageClient }: Args): GenerateURL =>
  ({ filename, prefix = '' }) => {
    return decodeURIComponent(
      getStorageClient().bucket(bucket).file(path.posix.join(prefix, filename)).publicUrl(),
    )
  }
@@ -1,18 +0,0 @@
import type { Storage } from '@google-cloud/storage'

import path from 'path'

import type { HandleDelete } from '../../types.js'

interface Args {
  bucket: string
  getStorageClient: () => Storage
}

export const getHandleDelete = ({ bucket, getStorageClient }: Args): HandleDelete => {
  return async ({ doc: { prefix = '' }, filename }) => {
    await getStorageClient().bucket(bucket).file(path.posix.join(prefix, filename)).delete({
      ignoreNotFound: true,
    })
  }
}
@@ -1,38 +0,0 @@
import type { Storage } from '@google-cloud/storage'
import type { CollectionConfig } from 'payload'

import path from 'path'

import type { HandleUpload } from '../../types.js'

interface Args {
  acl?: 'Private' | 'Public'
  bucket: string
  collection: CollectionConfig
  getStorageClient: () => Storage
  prefix?: string
}

export const getHandleUpload = ({
  acl,
  bucket,
  getStorageClient,
  prefix = '',
}: Args): HandleUpload => {
  return async ({ data, file }) => {
    const fileKey = path.posix.join(data.prefix || prefix, file.filename)

    const gcsFile = getStorageClient().bucket(bucket).file(fileKey)
    await gcsFile.save(file.buffer, {
      metadata: {
        contentType: file.mimeType,
      },
    })

    if (acl) {
      await gcsFile[`make${acl}`]()
    }

    return data
  }
}
@@ -1,65 +0,0 @@
import type { StorageOptions } from '@google-cloud/storage'

import { Storage } from '@google-cloud/storage'

import type { Adapter, GeneratedAdapter } from '../../types.js'

import { getGenerateURL } from './generateURL.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
import { getHandler } from './staticHandler.js'

export interface Args {
  acl?: 'Private' | 'Public'
  bucket: string
  options: StorageOptions
}

/**
 * @deprecated Use [`@payloadcms/storage-gcs`](https://www.npmjs.com/package/@payloadcms/storage-gcs) instead.
 *
 * This adapter has been superceded by `@payloadcms/storage-gcs` and will be removed in Payload 3.0.
 */
export const gcsAdapter =
  ({ acl, bucket, options }: Args): Adapter =>
  ({ collection, prefix }): GeneratedAdapter => {
    if (!Storage) {
      throw new Error(
        'The package @google-cloud/storage is not installed, but is required for the plugin-cloud-storage GCS adapter. Please install it.',
      )
    }

    let storageClient: null | Storage = null

    const getStorageClient = (): Storage => {
      if (storageClient) {
        return storageClient
      }
      try {
        storageClient = new Storage(options)
      } catch (error) {
        if (/is not a constructor$/.test(error.message)) {
          throw new Error(
            'The package @google-cloud/storage is not installed, but is required for the plugin-cloud-storage GCS adapter. Please install it.',
          )
        }
        // Re-throw other unexpected errors.
        throw error
      }
      return storageClient
    }

    return {
      name: 'gcs',
      generateURL: getGenerateURL({ bucket, getStorageClient }),
      handleDelete: getHandleDelete({ bucket, getStorageClient }),
      handleUpload: getHandleUpload({
        acl,
        bucket,
        collection,
        getStorageClient,
        prefix,
      }),
      staticHandler: getHandler({ bucket, collection, getStorageClient }),
    }
  }
@@ -1,53 +0,0 @@
import type { Storage } from '@google-cloud/storage'
import type { CollectionConfig } from 'payload'

import path from 'path'

import type { StaticHandler } from '../../types.js'

import { getFilePrefix } from '../../utilities/getFilePrefix.js'

interface Args {
  bucket: string
  collection: CollectionConfig
  getStorageClient: () => Storage
}

export const getHandler = ({ bucket, collection, getStorageClient }: Args): StaticHandler => {
  return async (req, { params: { filename } }) => {
    try {
      const prefix = await getFilePrefix({ collection, filename, req })
      const file = getStorageClient().bucket(bucket).file(path.posix.join(prefix, filename))

      const [metadata] = await file.getMetadata()

      // Manually create a ReadableStream for the web from a Node.js stream.
      const readableStream = new ReadableStream({
        start(controller) {
          const nodeStream = file.createReadStream()
          nodeStream.on('data', (chunk) => {
            controller.enqueue(new Uint8Array(chunk))
          })
          nodeStream.on('end', () => {
            controller.close()
          })
          nodeStream.on('error', (err) => {
            controller.error(err)
          })
        },
      })

      return new Response(readableStream, {
        headers: new Headers({
          'Content-Length': String(metadata.size),
          'Content-Type': metadata.contentType,
          ETag: metadata.etag,
        }),
        status: 200,
      })
    } catch (err: unknown) {
      req.payload.logger.error(err)
      return new Response('Internal Server Error', { status: 500 })
    }
  }
}
@@ -1,15 +0,0 @@
version: '3.2'
services:
  localstack:
    image: localstack/localstack:latest
    container_name: localstack_demo
    ports:
      - '4563-4599:4563-4599'
      - '8055:8080'
    environment:
      - SERVICES=s3
      - DEBUG=1
      - DATA_DIR=/tmp/localstack/data
    volumes:
      - './.localstack:/var/lib/localstack'
      - '/var/run/docker.sock:/var/run/docker.sock'
@@ -1,16 +0,0 @@
import type * as AWS from '@aws-sdk/client-s3'

import path from 'path'

import type { GenerateURL } from '../../types.js'

interface Args {
  bucket: string
  config: AWS.S3ClientConfig
}

export const getGenerateURL =
  ({ bucket, config: { endpoint } }: Args): GenerateURL =>
  ({ filename, prefix = '' }) => {
    return `${endpoint}/${bucket}/${path.posix.join(prefix, filename)}`
  }
@@ -1,19 +0,0 @@
import type * as AWS from '@aws-sdk/client-s3'

import path from 'path'

import type { HandleDelete } from '../../types.js'

interface Args {
  bucket: string
  getStorageClient: () => AWS.S3
}

export const getHandleDelete = ({ bucket, getStorageClient }: Args): HandleDelete => {
  return async ({ doc: { prefix = '' }, filename }) => {
    await getStorageClient().deleteObject({
      Bucket: bucket,
      Key: path.posix.join(prefix, filename),
    })
  }
}
@@ -1,63 +0,0 @@
import type * as AWS from '@aws-sdk/client-s3'
import type { CollectionConfig } from 'payload'
import type stream from 'stream'

import { Upload } from '@aws-sdk/lib-storage'
import fs from 'fs'
import path from 'path'

import type { HandleUpload } from '../../types.js'

interface Args {
  acl?: 'private' | 'public-read'
  bucket: string
  collection: CollectionConfig
  getStorageClient: () => AWS.S3
  prefix?: string
}

const multipartThreshold = 1024 * 1024 * 50 // 50MB

export const getHandleUpload = ({
  acl,
  bucket,
  getStorageClient,
  prefix = '',
}: Args): HandleUpload => {
  return async ({ data, file }) => {
    const fileKey = path.posix.join(data.prefix || prefix, file.filename)

    const fileBufferOrStream: Buffer | stream.Readable = file.tempFilePath
      ? fs.createReadStream(file.tempFilePath)
      : file.buffer

    if (file.buffer.length > 0 && file.buffer.length < multipartThreshold) {
      await getStorageClient().putObject({
        ACL: acl,
        Body: fileBufferOrStream,
        Bucket: bucket,
        ContentType: file.mimeType,
        Key: fileKey,
      })

      return data
    }

    const parallelUploadS3 = new Upload({
      client: getStorageClient(),
      params: {
        ACL: acl,
        Body: fileBufferOrStream,
        Bucket: bucket,
        ContentType: file.mimeType,
        Key: fileKey,
      },
      partSize: multipartThreshold,
      queueSize: 4,
    })

    await parallelUploadS3.done()

    return data
  }
}
@@ -1,71 +0,0 @@
import * as AWS from '@aws-sdk/client-s3'

import type { Adapter, GeneratedAdapter } from '../../types.js'

import { getGenerateURL } from './generateURL.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
import { getHandler } from './staticHandler.js'

export interface Args {
  acl?: 'private' | 'public-read'
  /**
   * Bucket name to upload files to.
   *
   * Must follow [AWS S3 bucket naming conventions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html).
   */
  bucket: string
  /**
   * AWS S3 client configuration. Highly dependent on your AWS setup.
   *
   * [AWS.S3ClientConfig Docs](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html)
   */
  config: AWS.S3ClientConfig
}

/**
 * @deprecated Use [`@payloadcms/storage-s3`](https://www.npmjs.com/package/@payloadcms/storage-s3) instead.
 *
 * This adapter has been superceded by `@payloadcms/storage-s3` and will be removed in Payload 3.0.
 */
export const s3Adapter =
  ({ acl, bucket, config = {} }: Args): Adapter =>
  ({ collection, prefix }): GeneratedAdapter => {
    if (!AWS) {
      throw new Error(
        'The packages @aws-sdk/client-s3, @aws-sdk/lib-storage and aws-crt are not installed, but are required for the plugin-cloud-storage S3 adapter. Please install them.',
      )
    }
    let storageClient: AWS.S3 | null = null
    const getStorageClient: () => AWS.S3 = () => {
      if (storageClient) {
        return storageClient
      }
      try {
        storageClient = new AWS.S3(config)
      } catch (error) {
        if (/is not a constructor$/.test(error.message)) {
          throw new Error(
            'The packages @aws-sdk/client-s3, @aws-sdk/lib-storage and aws-crt are not installed, but are required for the plugin-cloud-storage S3 adapter. Please install them.',
          )
        }
        // Re-throw other unexpected errors.
        throw error
      }
      return storageClient
    }

    return {
      name: 's3',
      generateURL: getGenerateURL({ bucket, config }),
      handleDelete: getHandleDelete({ bucket, getStorageClient }),
      handleUpload: getHandleUpload({
        acl,
        bucket,
        collection,
        getStorageClient,
        prefix,
      }),
      staticHandler: getHandler({ bucket, collection, getStorageClient }),
    }
  }
@@ -1,55 +0,0 @@
import type * as AWS from '@aws-sdk/client-s3'
import type { CollectionConfig } from 'payload'

import path from 'path'

import type { StaticHandler } from '../../types.js'

import { getFilePrefix } from '../../utilities/getFilePrefix.js'

interface Args {
  bucket: string
  collection: CollectionConfig
  getStorageClient: () => AWS.S3
}

// Convert a stream into a promise that resolves with a Buffer
const streamToBuffer = async (readableStream) => {
  const chunks = []
  for await (const chunk of readableStream) {
    chunks.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk)
  }
  return Buffer.concat(chunks)
}

export const getHandler = ({ bucket, collection, getStorageClient }: Args): StaticHandler => {
  return async (req, { params: { filename } }) => {
    try {
      const prefix = await getFilePrefix({ collection, filename, req })

      const object = await getStorageClient().getObject({
        Bucket: bucket,
        Key: path.posix.join(prefix, filename),
      })

      if (!object.Body) {
        return new Response(null, { status: 404, statusText: 'Not Found' })
      }

      const bodyBuffer = await streamToBuffer(object.Body)

      return new Response(bodyBuffer, {
        headers: new Headers({
          'Accept-Ranges': object.AcceptRanges,
          'Content-Length': String(object.ContentLength),
          'Content-Type': object.ContentType,
          ETag: object.ETag,
        }),
        status: 200,
      })
    } catch (err) {
      req.payload.logger.error(err)
      return new Response('Internal Server Error', { status: 500 })
    }
  }
}
@@ -1,14 +0,0 @@
import path from 'path'

import type { GenerateURL } from '../../types.js'

type GenerateUrlArgs = {
  baseUrl: string
  prefix?: string
}

export const getGenerateUrl = ({ baseUrl }: GenerateUrlArgs): GenerateURL => {
  return ({ filename, prefix = '' }) => {
    return `${baseUrl}/${path.posix.join(prefix, filename)}`
  }
}
@@ -1,19 +0,0 @@
import { del } from '@vercel/blob'
import path from 'path'

import type { HandleDelete } from '../../types.js'

type HandleDeleteArgs = {
  baseUrl: string
  prefix?: string
  token: string
}

export const getHandleDelete = ({ baseUrl, token }: HandleDeleteArgs): HandleDelete => {
  return async ({ doc: { prefix = '' }, filename }) => {
    const fileUrl = `${baseUrl}/${path.posix.join(prefix, filename)}`
    const deletedBlob = await del(fileUrl, { token })

    return deletedBlob
  }
}
@@ -1,39 +0,0 @@
import { put } from '@vercel/blob'
import path from 'path'

import type { HandleUpload } from '../../types.js'
import type { VercelBlobAdapterUploadOptions } from './index.js'

type HandleUploadArgs = {
  baseUrl: string
  prefix?: string
  token: string
} & VercelBlobAdapterUploadOptions

export const getHandleUpload = ({
  access = 'public',
  addRandomSuffix,
  baseUrl,
  cacheControlMaxAge,
  prefix = '',
  token,
}: HandleUploadArgs): HandleUpload => {
  return async ({ data, file: { buffer, filename, mimeType } }) => {
    const fileKey = path.posix.join(data.prefix || prefix, filename)

    const result = await put(fileKey, buffer, {
      access,
      addRandomSuffix,
      cacheControlMaxAge,
      contentType: mimeType,
      token,
    })

    // Get filename with suffix from returned url
    if (addRandomSuffix) {
      data.filename = result.url.replace(`${baseUrl}/`, '')
    }

    return data
  }
}
@@ -1,88 +0,0 @@
import type { Adapter, GeneratedAdapter } from '../../types.js'

import { getGenerateUrl } from './generateURL.js'
import { getHandleDelete } from './handleDelete.js'
import { getHandleUpload } from './handleUpload.js'
import { getStaticHandler } from './staticHandler.js'

export interface VercelBlobAdapterArgs {
  options?: VercelBlobAdapterUploadOptions

  /**
   * Vercel Blob storage read/write token
   *
   * Usually process.env.BLOB_READ_WRITE_TOKEN set by Vercel
   */
  token: string
}

export interface VercelBlobAdapterUploadOptions {
  /**
   * Access control level
   *
   * @default 'public'
   */
  access?: 'public'
  /**
   * Add a random suffix to the uploaded file name
   *
   * @default false
   */
  addRandomSuffix?: boolean
  /**
   * Cache-Control max-age in seconds
   *
   * @default 31536000 (1 year)
   */
  cacheControlMaxAge?: number
}

const defaultUploadOptions: VercelBlobAdapterUploadOptions = {
  access: 'public',
  addRandomSuffix: false,
  cacheControlMaxAge: 60 * 60 * 24 * 365, // 1 year
}

/**
 * @deprecated Use [`@payloadcms/storage-vercel-blob`](https://www.npmjs.com/package/@payloadcms/storage-vercel-blob) instead.
 *
 * This adapter has been superceded by `@payloadcms/storage-vercel-blob` and will be removed in Payload 3.0.
 */
export const vercelBlobAdapter =
  ({ options = {}, token }: VercelBlobAdapterArgs): Adapter =>
  ({ collection, prefix }): GeneratedAdapter => {
    if (!token) {
      throw new Error('The token argument is required for the Vercel Blob adapter.')
    }

    // Parse storeId from token
    const storeId = token.match(/^vercel_blob_rw_([a-z\d]+)_[a-z\d]+$/i)?.[1].toLowerCase()

    if (!storeId) {
      throw new Error(
        'Invalid token format for Vercel Blob adapter. Should be vercel_blob_rw_<store_id>_<random_string>.',
      )
    }

    const { access, addRandomSuffix, cacheControlMaxAge } = {
      ...defaultUploadOptions,
      ...options,
    }

    const baseUrl = `https://${storeId}.${access}.blob.vercel-storage.com`

    return {
      name: 'vercel-blob',
      generateURL: getGenerateUrl({ baseUrl, prefix }),
      handleDelete: getHandleDelete({ baseUrl, prefix, token }),
      handleUpload: getHandleUpload({
        access,
        addRandomSuffix,
        baseUrl,
        cacheControlMaxAge,
        prefix,
        token,
      }),
      staticHandler: getStaticHandler({ baseUrl, token }, collection),
    }
  }
@@ -1,53 +0,0 @@
import type { CollectionConfig } from 'payload'

import { head } from '@vercel/blob'
import path from 'path'

import type { StaticHandler } from '../../types.js'

import { getFilePrefix } from '../../utilities/getFilePrefix.js'

type StaticHandlerArgs = {
  baseUrl: string
  token: string
}

export const getStaticHandler = (
  { baseUrl, token }: StaticHandlerArgs,
  collection: CollectionConfig,
): StaticHandler => {
  return async (req, { params: { filename } }) => {
    try {
      const prefix = await getFilePrefix({ collection, filename, req })

      const fileUrl = `${baseUrl}/${path.posix.join(prefix, filename)}`

      const blobMetadata = await head(fileUrl, { token })
      if (!blobMetadata) {
        return new Response(null, { status: 404, statusText: 'Not Found' })
      }

      const { contentDisposition, contentType, size } = blobMetadata
      const response = await fetch(fileUrl)
      const blob = await response.blob()

      if (!blob) {
        return new Response(null, { status: 204, statusText: 'No Content' })
      }

      const bodyBuffer = await blob.arrayBuffer()

      return new Response(bodyBuffer, {
        headers: new Headers({
          'Content-Disposition': contentDisposition,
          'Content-Length': String(size),
          'Content-Type': contentType,
        }),
        status: 200,
      })
    } catch (err: unknown) {
      req.payload.logger.error({ err, msg: 'Unexpected error in staticHandler' })
      return new Response('Internal Server Error', { status: 500 })
    }
  }
}
@@ -1 +0,0 @@
export { azureBlobStorageAdapter } from '../adapters/azure/index.js'
@@ -1 +0,0 @@
export { gcsAdapter } from '../adapters/gcs/index.js'
@@ -1 +0,0 @@
export { s3Adapter } from '../adapters/s3/index.js'
@@ -1 +0,0 @@
export { vercelBlobAdapter } from '../adapters/vercelBlob/index.js'
@@ -1,28 +0,0 @@
import type { BlockBlobClient } from '@azure/storage-blob'

import parseRange from 'range-parser'

const getRangeFromHeader = async (
  blockBlobClient: BlockBlobClient,
  rangeHeader?: string,
): Promise<{ end: number | undefined; start: number }> => {
  const fullRange = { end: undefined, start: 0 }

  if (!rangeHeader) {
    return fullRange
  }

  const size = await blockBlobClient.getProperties().then((props) => props.contentLength)
  if (size === undefined) {
    return fullRange
  }

  const range = parseRange(size, rangeHeader)
  if (range === -1 || range === -2 || range.type !== 'bytes' || range.length !== 1) {
    return fullRange
  }

  return range[0]
}

export default getRangeFromHeader
pnpm-lock.yaml (generated, 18 lines changed)
@@ -968,9 +968,6 @@ importers:

  packages/plugin-cloud-storage:
    dependencies:
      '@azure/abort-controller':
        specifier: ^1.0.0
        version: 1.1.0
      find-node-modules:
        specifier: ^2.1.3
        version: 2.1.3
@@ -978,24 +975,9 @@
        specifier: ^1.2.1
        version: 1.2.1
    devDependencies:
      '@aws-sdk/client-s3':
        specifier: ^3.614.0
        version: 3.687.0
      '@aws-sdk/lib-storage':
        specifier: ^3.614.0
        version: 3.687.0(@aws-sdk/client-s3@3.687.0)
      '@azure/storage-blob':
        specifier: ^12.11.0
        version: 12.25.0
      '@google-cloud/storage':
        specifier: ^7.7.0
        version: 7.14.0
      '@types/find-node-modules':
        specifier: ^2.1.2
        version: 2.1.2
      '@vercel/blob':
        specifier: ^0.22.3
        version: 0.22.3
      payload:
        specifier: workspace:*
        version: link:../payload

@@ -1,9 +1,8 @@
import type { Adapter } from '@payloadcms/plugin-cloud-storage/types'
import type { Plugin } from 'payload'

import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { azureBlobStorageAdapter } from '@payloadcms/plugin-cloud-storage/azure'
import { gcsAdapter } from '@payloadcms/plugin-cloud-storage/gcs'
import { s3Adapter } from '@payloadcms/plugin-cloud-storage/s3'
import { azureStorage } from '@payloadcms/storage-azure'
import { gcsStorage } from '@payloadcms/storage-gcs'
import { s3Storage } from '@payloadcms/storage-s3'
import dotenv from 'dotenv'
import { fileURLToPath } from 'node:url'
import path from 'path'
@@ -15,10 +14,11 @@ import { MediaWithPrefix } from './collections/MediaWithPrefix.js'
import { Users } from './collections/Users.js'
import { mediaSlug, mediaWithPrefixSlug, prefix } from './shared.js'
import { createTestBucket } from './utils.js'

const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)

let adapter: Adapter
let storagePlugin: Plugin
let uploadOptions

// Load config to work with emulated services
@@ -27,19 +27,28 @@ dotenv.config({
})

if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'azure') {
  adapter = azureBlobStorageAdapter({
  storagePlugin = azureStorage({
    collections: {
      [mediaSlug]: true,
      [mediaWithPrefixSlug]: {
        prefix,
      },
    },
    allowContainerCreate: process.env.AZURE_STORAGE_ALLOW_CONTAINER_CREATE === 'true',
    baseURL: process.env.AZURE_STORAGE_ACCOUNT_BASEURL,
    connectionString: process.env.AZURE_STORAGE_CONNECTION_STRING,
    containerName: process.env.AZURE_STORAGE_CONTAINER_NAME,
  })
  // uploadOptions = {
  // useTempFiles: true,
  // }
}

if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'gcs') {
  adapter = gcsAdapter({
  storagePlugin = gcsStorage({
    collections: {
      [mediaSlug]: true,
      [mediaWithPrefixSlug]: {
        prefix,
      },
    },
    bucket: process.env.GCS_BUCKET,
    options: {
      apiEndpoint: process.env.GCS_ENDPOINT,
@@ -57,7 +66,13 @@ if (
    useTempFiles: true,
  }

  adapter = s3Adapter({
  storagePlugin = s3Storage({
    collections: {
      [mediaSlug]: true,
      [mediaWithPrefixSlug]: {
        prefix,
      },
    },
    bucket: process.env.S3_BUCKET,
    config: {
      credentials: {
@@ -72,16 +87,22 @@ if (
}

if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'r2') {
  adapter = s3Adapter({
  s3Storage({
    collections: {
      [mediaSlug]: true,
      [mediaWithPrefixSlug]: {
        prefix,
      },
    },
    bucket: process.env.R2_BUCKET,
    config: {
      credentials: {
        accessKeyId: process.env.R2_ACCESS_KEY_ID,
        secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
        accessKeyId: process.env.S3_ACCESS_KEY_ID,
        secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
      },
      endpoint: process.env.R2_ENDPOINT,
      forcePathStyle: process.env.R2_FORCE_PATH_STYLE === 'true',
      region: process.env.R2_REGION,
      endpoint: process.env.S3_ENDPOINT,
      forcePathStyle: process.env.S3_FORCE_PATH_STYLE === 'true',
      region: process.env.S3_REGION,
    },
  })
}
@@ -126,19 +147,7 @@ export default buildConfigWithDefaults({
      `Using plugin-cloud-storage adapter: ${process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER}`,
    )
  },
  plugins: [
    cloudStoragePlugin({
      collections: {
        [mediaSlug]: {
          adapter,
        },
        [mediaWithPrefixSlug]: {
          adapter,
          prefix,
        },
      },
    }),
  ],
  plugins: [storagePlugin],
  upload: uploadOptions,
  typescript: {
    outputFile: path.resolve(dirname, 'payload-types.ts'),