Merge pull request #3847 from payloadcms/import/plugin-cloud-storage

chore: import plugin-cloud-storage
This commit is contained in:
Elliot DeNolf
2023-10-24 16:37:26 -04:00
committed by GitHub
64 changed files with 3120 additions and 147 deletions

View File

@@ -0,0 +1,37 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
extends: ['@payloadcms'],
overrides: [
{
extends: ['plugin:@typescript-eslint/disable-type-checked'],
files: ['*.js', '*.cjs', '*.json', '*.md', '*.yml', '*.yaml'],
},
{
files: ['package.json', 'tsconfig.json'],
rules: {
'perfectionist/sort-array-includes': 'off',
'perfectionist/sort-astro-attributes': 'off',
'perfectionist/sort-classes': 'off',
'perfectionist/sort-enums': 'off',
'perfectionist/sort-exports': 'off',
'perfectionist/sort-imports': 'off',
'perfectionist/sort-interfaces': 'off',
'perfectionist/sort-jsx-props': 'off',
'perfectionist/sort-keys': 'off',
'perfectionist/sort-maps': 'off',
'perfectionist/sort-named-exports': 'off',
'perfectionist/sort-named-imports': 'off',
'perfectionist/sort-object-types': 'off',
'perfectionist/sort-objects': 'off',
'perfectionist/sort-svelte-attributes': 'off',
'perfectionist/sort-union-types': 'off',
'perfectionist/sort-vue-attributes': 'off',
},
},
],
parserOptions: {
project: ['./tsconfig.json'],
tsconfigRootDir: __dirname,
},
root: true,
}

packages/plugin-cloud-storage/.gitignore vendored Normal file
View File

@@ -0,0 +1,248 @@
dev/tmp
dev/yarn.lock
# Created by https://www.gitignore.io/api/node,macos,windows,webstorm,sublimetext,visualstudiocode
### macOS ###
*.DS_Store
.AppleDouble
.LSOverride
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Typescript v1 declaration files
typings/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# Yarn Berry
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
.pnp.*
# dotenv environment variables file
.env
### SublimeText ###
# cache files for sublime text
*.tmlanguage.cache
*.tmPreferences.cache
*.stTheme.cache
# workspace files are user-specific
*.sublime-workspace
# project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using SublimeText
# *.sublime-project
# sftp configuration file
sftp-config.json
# Package control specific files
Package Control.last-run
Package Control.ca-list
Package Control.ca-bundle
Package Control.system-ca-bundle
Package Control.cache/
Package Control.ca-certs/
Package Control.merged-ca-bundle
Package Control.user-ca-bundle
oscrypto-ca-bundle.crt
bh_unicode_properties.cache
# Sublime-github package stores a github token in this file
# https://packagecontrol.io/packages/sublime-github
GitHub.sublime-settings
### VisualStudioCode ###
.vscode/*
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
.history
### WebStorm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
.idea/*
# User-specific stuff:
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/dictionaries
# Sensitive or high-churn files:
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.xml
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
# Gradle:
.idea/**/gradle.xml
.idea/**/libraries
# CMake
cmake-build-debug/
# Mongo Explorer plugin:
.idea/**/mongoSettings.xml
## File-based project format:
*.iws
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Ruby plugin and RubyMine
/.rakeTasks
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
### WebStorm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
.idea/sonarlint
### Windows ###
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Folder config file
Desktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.gitignore.io/api/node,macos,windows,webstorm,sublimetext,visualstudiocode
# Ignore all uploads
demo/upload
demo/media
demo/files
# Ignore build folder
build
# Ignore built components
components/index.js
components/styles.css
# Ignore generated
demo/generated-types.ts
demo/generated-schema.graphql
# Ignore dist, no need for git
dist
# Ignore emulator volumes
src/adapters/s3/emulator/.localstack/

View File

@@ -0,0 +1,15 @@
{
"$schema": "https://json.schemastore.org/swcrc",
"sourceMaps": "inline",
"jsc": {
"target": "esnext",
"parser": {
"syntax": "typescript",
"tsx": true,
"dts": true
}
},
"module": {
"type": "commonjs"
}
}

View File

@@ -0,0 +1,34 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"cwd": "${workspaceFolder}/dev",
"runtimeArgs": ["-r", "./node_modules/ts-node/register"],
"name": "Debug Cloud Storage - S3",
"program": "${workspaceFolder}/dev/src/server.ts",
"outputCapture": "std",
"env": {
"PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER": "s3",
"PAYLOAD_CONFIG_PATH": "${workspaceFolder}/dev/src/payload.config.ts"
}
},
{
"type": "node",
"request": "launch",
"cwd": "${workspaceFolder}/dev",
"runtimeArgs": ["-r", "./node_modules/ts-node/register"],
"name": "Debug Cloud Storage - Azure",
"program": "${workspaceFolder}/dev/src/server.ts",
"outputCapture": "std",
"env": {
"PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER": "azure",
"PAYLOAD_CONFIG_PATH": "${workspaceFolder}/dev/src/payload.config.ts"
}
}
]
}

View File

@@ -0,0 +1,9 @@
{
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.codeActionsOnSave": {
"source.fixAll": true
},
"editor.formatOnSave": true
}
}

View File

@@ -0,0 +1,22 @@
(The MIT License)
Copyright (c) 2018-2023 Payload CMS, INC <info@payloadcms.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,199 @@
# Payload Cloud Storage Plugin
This repository contains the officially supported Payload Cloud Storage plugin. It extends Payload to allow you to store all uploaded media in third-party permanent storage.
#### Requirements
- Payload version `1.0.19` or higher is required
## Installation
`yarn add @payloadcms/plugin-cloud-storage` or `npm install @payloadcms/plugin-cloud-storage`
## Usage
Once installed, add this plugin to your Payload config as follows:
```ts
import { buildConfig } from 'payload/config'
import path from 'path'
import { cloudStorage } from '@payloadcms/plugin-cloud-storage'
export default buildConfig({
plugins: [
cloudStorage({
collections: {
'my-collection-slug': {
adapter: theAdapterToUse, // see docs for the adapter you want to use
},
},
}),
],
// The rest of your config goes here
})
```
### Conditionally Enabling/Disabling
The proper way to conditionally enable/disable this plugin is to use the `enabled` property.
```ts
cloudStorage({
enabled: process.env.MY_CONDITION === 'true',
collections: {
'my-collection-slug': {
adapter: theAdapterToUse, // see docs for the adapter you want to use
},
},
}),
```
If the code is included _in any way in your config_ but conditionally disabled in another fashion, you may run into issues such as `Webpack Build Error: Can't Resolve 'fs' and 'stream'` or similar because the plugin must be run at all times in order to properly extend the webpack config.
## Features
**Adapter-based Implementation**
This plugin supports the following adapters:
- [Azure Blob Storage](#azure-blob-storage-adapter)
- [AWS S3-style Storage](#s3-adapter)
- [Google Cloud Storage](#gcs-adapter)
However, you can create your own adapter for any third-party service you would like to use.
All adapters are implemented in the `dev` directory's [Payload Config](https://github.com/payloadcms/plugin-cloud-storage/blob/master/dev/src/payload.config.ts). See this file for examples.
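As a rough sketch of what writing your own adapter involves (based on the `Adapter` and `GeneratedAdapter` types in `src/types.ts`), here is a minimal, purely illustrative in-memory adapter. The import path and the `storage.example.com` host are assumptions for the example, not part of the plugin's documented API:

```ts
import path from 'path'
import type { Adapter, GeneratedAdapter } from '@payloadcms/plugin-cloud-storage/dist/types'

// Purely illustrative in-memory "bucket" — a real adapter would call a storage SDK
const store = new Map<string, Buffer>()

export const inMemoryAdapter = (): Adapter => {
  return ({ prefix = '' }): GeneratedAdapter => ({
    handleUpload: async ({ data, file }) => {
      // Keyed the same way the bundled adapters key objects: prefix + filename
      store.set(path.posix.join(data.prefix || prefix, file.filename), file.buffer)
    },
    handleDelete: async ({ doc: { prefix: docPrefix = '' }, filename }) => {
      store.delete(path.posix.join(docPrefix, filename))
    },
    generateURL: ({ filename, prefix: filePrefix = '' }) =>
      // Hypothetical public host — only used when Payload's access control is disabled
      `https://storage.example.com/${path.posix.join(filePrefix, filename)}`,
    staticHandler: (req, res, next) => {
      // Prefix lookup omitted for brevity
      const file = store.get(req.params.filename)
      if (!file) return next()
      return res.send(file)
    },
  })
}
```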
## Plugin options
This plugin is configurable to work across many different Payload collections. A `*` denotes that the property is required.
| Option | Type | Description |
| ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------- |
| `collections` \* | Record<string, [CollectionOptions](https://github.com/payloadcms/plugin-cloud-storage/blob/c4a492a62abc2f21b4cd6a7c97778acd8e831212/src/types.ts#L48)> | Object with keys set to the slug of collections you want to enable the plugin for, and values set to collection-specific options. |
| `enabled`        | `boolean`                                                                                                                                                | Conditionally enable/disable the plugin. Default: `true`.                                                                          |
**Collection-specific options:**
| Option | Type | Description |
| ----------------------------- | -------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `adapter` \* | [Adapter](https://github.com/payloadcms/plugin-cloud-storage/blob/master/src/types.ts#L51) | Pass in the adapter that you'd like to use for this collection. You can also set this field to `null` for local development if you'd like to bypass cloud storage in certain scenarios and use local storage. |
| `disableLocalStorage` | `boolean` | Choose to disable local storage on this collection. Defaults to `true`. |
| `disablePayloadAccessControl` | `true` | Set to `true` to disable Payload's access control. [More](#payload-access-control) |
| `prefix`                      | `string`                                                                                             | Set to, e.g., `media/images` to upload files inside the `media/images` folder in the bucket.                                                                                                                    |
| `generateFileURL` | [GenerateFileURL](https://github.com/payloadcms/plugin-cloud-storage/blob/master/src/types.ts#L53) | Override the generated file URL with one that you create. |
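For example, `generateFileURL` can point file URLs at a CDN in front of your bucket. This is a sketch: `cdn.example.com` is a placeholder host, and the argument shape follows how the plugin's `afterRead` hook invokes the function:

```ts
cloudStorage({
  collections: {
    'my-collection-slug': {
      adapter: theAdapterToUse,
      prefix: 'media/images', // files land under media/images in the bucket
      generateFileURL: ({ filename, prefix = '' }) =>
        `https://cdn.example.com/${prefix}/${filename}`,
    },
  },
})
```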
### Azure Blob Storage Adapter
To use the Azure Blob Storage adapter, you need to have `@azure/storage-blob` installed in your project dependencies. To do so, run `yarn add @azure/storage-blob`.
From there, create the adapter, passing in all of its required properties:
```js
import { azureBlobStorageAdapter } from '@payloadcms/plugin-cloud-storage/azure'
const adapter = azureBlobStorageAdapter({
connectionString: process.env.AZURE_STORAGE_CONNECTION_STRING,
containerName: process.env.AZURE_STORAGE_CONTAINER_NAME,
allowContainerCreate: process.env.AZURE_STORAGE_ALLOW_CONTAINER_CREATE === 'true',
baseURL: process.env.AZURE_STORAGE_ACCOUNT_BASEURL,
})
// Now you can pass this adapter to the plugin
```
### S3 Adapter
To use the S3 adapter, some peer dependencies need to be installed:
`yarn add @aws-sdk/client-s3 @aws-sdk/lib-storage aws-crt`.
From there, create the adapter, passing in all of its required properties:
```js
import { s3Adapter } from '@payloadcms/plugin-cloud-storage/s3'
const adapter = s3Adapter({
config: {
credentials: {
accessKeyId: process.env.S3_ACCESS_KEY_ID,
secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
},
region: process.env.S3_REGION,
// ... Other S3 configuration
},
bucket: process.env.S3_BUCKET,
})
// Now you can pass this adapter to the plugin
```
Note that the `credentials` option can be omitted when you are running Payload on an EC2 instance that has been configured with an IAM role granting the necessary permissions.
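For example (a sketch; with no explicit credentials, the AWS SDK resolves them through its default provider chain — environment variables, shared config, or the instance's IAM role):

```ts
import { s3Adapter } from '@payloadcms/plugin-cloud-storage/s3'

const adapter = s3Adapter({
  config: {
    // No `credentials` key: on EC2, the instance's IAM role is picked up automatically
    region: process.env.S3_REGION,
  },
  bucket: process.env.S3_BUCKET,
})
```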
Other S3 Client configuration is documented [here](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html).
Any upload over 50MB will automatically be uploaded using S3's multi-part upload.
#### Other S3-Compatible Storage
If you're running an S3-compatible object storage such as MinIO or Digital Ocean Spaces, you'll have to set the `endpoint` appropriately for the provider.
```js
import { s3Adapter } from '@payloadcms/plugin-cloud-storage/s3'
const adapter = s3Adapter({
config: {
endpoint: process.env.S3_ENDPOINT, // Configure for your provider
// ...
},
// ...
})
```
### GCS Adapter
To use the GCS adapter, you need to have `@google-cloud/storage` installed in your project dependencies. To do so, run `yarn add @google-cloud/storage`.
From there, create the adapter, passing in all of its required properties:
```js
import { gcsAdapter } from '@payloadcms/plugin-cloud-storage/gcs'
const adapter = gcsAdapter({
options: {
// You can use any of the authentication and authorization methods provided by `@google-cloud/storage`
keyFilename: './gcs-credentials.json',
// OR
credentials: JSON.parse(process.env.GCS_CREDENTIALS || '{}'), // this env variable should contain the stringified contents of your credentials.json file
},
bucket: process.env.GCS_BUCKET,
})
// Now you can pass this adapter to the plugin
```
### Payload Access Control
Payload ships with access control that runs _even on statically served files_. The same `read` access control property on your `upload`-enabled collections is used, and it allows you to restrict who can request your uploaded files.
To preserve this feature, by default, this plugin _keeps all file URLs exactly the same_. Your file URLs won't be updated to point directly to your cloud storage source, as in that case, Payload's access control will be completely bypassed and you would need public readability on your cloud-hosted files.
Instead, all uploads will still be reached from the default `/collectionSlug/staticURL/filename` path. This plugin will "pass through" all files that are hosted on your third-party cloud service—with the added benefit of keeping your existing access control in place.
If this does not apply to you (your upload collection has `read: () => true` or similar) you can disable this functionality by setting `disablePayloadAccessControl` to `true`. When this setting is in place, this plugin will update your file URLs to point directly to your cloud host.
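For example, a collection whose files are public anyway can opt out of the pass-through behavior (sketch):

```ts
cloudStorage({
  collections: {
    'my-collection-slug': {
      adapter: theAdapterToUse,
      // Files are publicly readable in the bucket, so URLs may point directly at cloud storage
      disablePayloadAccessControl: true,
    },
  },
})
```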
## Local development
For instructions regarding how to develop with this plugin locally, [click here](https://github.com/payloadcms/plugin-cloud-storage/blob/master/docs/local-dev.md).
## Questions
Please contact [Payload](mailto:dev@payloadcms.com) with any questions about using this plugin.
## Credit
This plugin was created with significant help, and code, from [Alex Bechmann](https://github.com/alexbechmann) and [Richard VanBergen](https://github.com/richardvanbergen). Thank you!!

View File

@@ -0,0 +1 @@
export * from './dist/adapters/azure'

View File

@@ -0,0 +1 @@
exports.azureBlobStorageAdapter = require('./dist/adapters/azure').azureBlobStorageAdapter

View File

@@ -0,0 +1,21 @@
MONGODB_URI=mongodb://localhost/payload-plugin-cloud-storage
PAYLOAD_PUBLIC_SERVER_URL=http://localhost:3000
PAYLOAD_SECRET=45ligj345ligj4wl5igj4lw5igj45ligj45wlijl
PAYLOAD_CONFIG_PATH=src/payload.config.ts
AZURE_STORAGE_CONNECTION_STRING=DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://localhost:10000/devstoreaccount1;QueueEndpoint=http://localhost:10001/devstoreaccount1;
AZURE_STORAGE_CONTAINER_NAME=az-media
AZURE_STORAGE_ALLOW_CONTAINER_CREATE=true
AZURE_STORAGE_ACCOUNT_BASEURL=http://localhost:10000/devstoreaccount1
S3_ENDPOINT=http://localhost:4566
S3_ACCESS_KEY_ID=payloadAccessKey
S3_SECRET_ACCESS_KEY=alwiejglaiwhewlihgawe
S3_BUCKET=payload-bucket
S3_FORCE_PATH_STYLE=true
GCS_ENDPOINT=http://localhost:4443
GCS_PROJECT_ID=test
GCS_BUCKET=payload-bucket
PAYLOAD_DROP_DATABASE=true

View File

@@ -0,0 +1,5 @@
{
"exec": "node --trace-warnings -r ts-node/register -r ./src/server.ts",
"ext": "ts",
"watch": ["src/**/*.ts", "../src/**/*.ts"]
}

View File

@@ -0,0 +1,34 @@
{
"name": "payload-plugin-cloud-storage-demo",
"version": "1.0.0",
"main": "dist/server.js",
"license": "MIT",
"private": true,
"scripts": {
"dev:azure": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER=azure nodemon",
"dev:s3": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER=s3 nodemon",
"dev:gcs": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER=gcs nodemon",
"build:payload": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload build",
"build:server": "tsc",
"build": "yarn build:payload && yarn build:server",
"serve": "cross-env PAYLOAD_CONFIG_PATH=dist/payload.config.js NODE_ENV=production node dist/server.js",
"generate:types": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:types"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.142.0",
"@azure/storage-blob": "^12.11.0",
"@google-cloud/storage": "^6.4.2",
"dotenv": "^8.2.0",
"express": "^4.17.1",
"image-size": "^1.0.2",
"payload": "^1.7.2",
"probe-image-size": "^7.2.3"
},
"devDependencies": {
"@types/express": "^4.17.9",
"cross-env": "^7.0.3",
"nodemon": "^2.0.6",
"ts-node": "^9.1.1",
"typescript": "^4.1.3"
}
}

View File

@@ -0,0 +1,56 @@
/* eslint-disable no-console */
import type { CollectionConfig, Field } from 'payload/types'
const urlField: Field = {
name: 'url',
type: 'text',
hooks: {
afterRead: [
({ value }) => {
console.log('hello from hook')
return value
},
],
},
}
export const Media: CollectionConfig = {
slug: 'media',
upload: {
imageSizes: [
{
height: 400,
width: 400,
crop: 'center',
name: 'square',
},
{
width: 900,
height: 450,
crop: 'center',
name: 'sixteenByNineMedium',
},
],
},
fields: [
{
name: 'alt',
label: 'Alt Text',
type: 'text',
},
// The following fields should be able to be merged into the default upload fields
urlField,
{
name: 'sizes',
type: 'group',
fields: [
{
name: 'square',
type: 'group',
fields: [urlField],
},
],
},
],
}

View File

@@ -0,0 +1,23 @@
import type { CollectionConfig } from 'payload/types'
const Users: CollectionConfig = {
slug: 'users',
auth: true,
access: {
read: () => true,
},
fields: [
{
name: 'avatar',
type: 'upload',
relationTo: 'media',
},
{
name: 'background',
type: 'upload',
relationTo: 'media',
},
],
}
export default Users

View File

@@ -0,0 +1 @@
module.exports = { fs: { createReadStream: () => null } }

View File

@@ -0,0 +1 @@
export const promisify = () => {}

View File

@@ -0,0 +1,123 @@
import { buildConfig } from 'payload/config'
import path from 'path'
import Users from './collections/Users'
import { cloudStorage } from '../../src'
import { s3Adapter } from '../../src/adapters/s3'
import { gcsAdapter } from '../../src/adapters/gcs'
import { azureBlobStorageAdapter } from '../../src/adapters/azure'
import type { Adapter } from '../../src/types'
import { Media } from './collections/Media'
let adapter: Adapter
let uploadOptions
if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'azure') {
adapter = azureBlobStorageAdapter({
connectionString: process.env.AZURE_STORAGE_CONNECTION_STRING,
containerName: process.env.AZURE_STORAGE_CONTAINER_NAME,
allowContainerCreate: process.env.AZURE_STORAGE_ALLOW_CONTAINER_CREATE === 'true',
baseURL: process.env.AZURE_STORAGE_ACCOUNT_BASEURL,
})
// uploadOptions = {
// useTempFiles: true,
// }
}
if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 's3') {
// The s3 adapter supports using temp files for uploads
uploadOptions = {
useTempFiles: true,
}
adapter = s3Adapter({
config: {
endpoint: process.env.S3_ENDPOINT,
forcePathStyle: process.env.S3_FORCE_PATH_STYLE === 'true',
region: process.env.S3_REGION,
credentials: {
accessKeyId: process.env.S3_ACCESS_KEY_ID,
secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
},
},
bucket: process.env.S3_BUCKET,
})
}
if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'gcs') {
adapter = gcsAdapter({
options: {
apiEndpoint: process.env.GCS_ENDPOINT,
projectId: process.env.GCS_PROJECT_ID,
},
bucket: process.env.GCS_BUCKET,
})
}
export default buildConfig({
serverURL: 'http://localhost:3000',
collections: [Media, Users],
upload: uploadOptions,
admin: {
// NOTE - these webpack extensions are only required
// for development of this plugin.
// No need to use these aliases within your own projects.
webpack: (config) => {
const newConfig = {
...config,
resolve: {
...(config.resolve || {}),
alias: {
...(config.resolve.alias || {}),
react: path.resolve(__dirname, '../node_modules/react'),
[path.resolve(__dirname, '../../src/index')]: path.resolve(
__dirname,
'../../src/admin/index.ts',
),
[path.resolve(__dirname, '../../src/adapters/s3/index')]: path.resolve(
__dirname,
'../../src/adapters/s3/mock.js',
),
[path.resolve(__dirname, '../../src/adapters/gcs/index')]: path.resolve(
__dirname,
'../../src/adapters/gcs/mock.js',
),
[path.resolve(__dirname, '../../src/adapters/azure/index')]: path.resolve(
__dirname,
'../../src/adapters/azure/mock.js',
),
},
},
}
return newConfig
},
},
typescript: {
outputFile: path.resolve(__dirname, 'payload-types.ts'),
},
plugins: [
// @ts-expect-error Conflicting types for relative package
cloudStorage({
collections: {
media: {
adapter,
},
},
}),
],
onInit: async (payload) => {
const users = await payload.find({
collection: 'users',
limit: 1,
})
if (!users.docs.length) {
await payload.create({
collection: 'users',
data: {
email: 'dev@payloadcms.com',
password: 'test',
},
})
}
},
})

View File

@@ -0,0 +1,26 @@
import express from 'express'
import payload from 'payload'
// eslint-disable-next-line @typescript-eslint/no-require-imports
require('dotenv').config()
const app = express()
// Redirect root to Admin panel
app.get('/', (_, res) => {
res.redirect('/admin')
})
// Initialize Payload
payload.init({
secret: process.env.PAYLOAD_SECRET,
mongoURL: process.env.MONGODB_URI,
express: app,
onInit: () => {
payload.logger.info(`Payload Admin URL: ${payload.getAdminURL()}`)
},
})
// Add your own express routes here
app.listen(3000)

View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "es5",
"lib": ["dom", "dom.iterable", "esnext"],
"strict": false,
"esModuleInterop": true,
"skipLibCheck": true,
"outDir": "./dist",
"rootDir": "../",
"jsx": "react",
"sourceMap": true
},
"ts-node": {
"transpileOnly": true
}
}

View File

@@ -0,0 +1,47 @@
# Developing with this plugin locally
This repository includes a local development environment for testing and developing this plugin. To run the local sandbox, follow the instructions below.
1. Make sure you have Node and a MongoDB instance to work with
1. Clone the repo
1. `yarn` in both the root folder of the repo, and the `./dev` folder
1. `cd` into `./dev` and run `cp .env.example .env` to create an `.env` file
1. Open your newly created `./dev/.env` file and _completely_ fill out each property
## Azure Adapter Development
This repository comes with a Docker emulator for Azure Blob Storage.
If you would like to test locally with an emulated blob storage container, you can `cd` into `./src/adapters/azure/emulator` and then run `docker-compose up -d`.
The default `./dev/.env.example` file comes pre-loaded with correct `env` variables that correspond to the Azure Docker emulator.
Otherwise, if you are not using the emulator, make sure your environment variables within `./dev/.env` are configured for your Azure connection.
Finally, to start the Payload dev server with the Azure adapter, run `yarn dev:azure` and then open `http://localhost:3000/admin` in your browser.
## S3 Adapter Development
This repository also includes a Docker LocalStack emulator for S3. It requires a few more steps to get up and running.
To use the S3 emulator, use the following steps:
1. Make sure you have `awscli` installed. On Mac, run `brew install awscli` to get started.
1. Make sure you have an AWS profile created. LocalStack does not verify credentials, so you can create a profile with dummy credentials. However, your `region` will need to match. To create a dummy profile for LocalStack, type `aws configure --profile localstack`. Use the access key and secret from the `./dev/.env.example` and use region `us-east-1`.
1. Now you can start the Docker container via moving to the `./src/adapters/s3/emulator` folder and running `docker-compose up -d`.
1. Once the Docker container is running, you can create a new bucket with the following command: `aws --endpoint-url=http://localhost:4566 s3 mb s3://payload-bucket`. Note that our bucket is called `payload-bucket`.
1. Finally, attach an ACL to the bucket so it is readable: `aws --endpoint-url=http://localhost:4566 s3api put-bucket-acl --bucket payload-bucket --acl public-read`
Finally, you can run `yarn dev:s3` and then open `http://localhost:3000/admin` in your browser.
## Google Cloud Storage (GCS) Adapter Development
This repository comes with a Docker emulator for Google Cloud Storage.
If you would like to test locally with an emulated GCS container, you can `cd` into `./src/adapters/gcs/emulator` and then run `docker-compose up -d`.
The default `./dev/.env.example` file comes pre-loaded with correct `env` variables that correspond to the GCS Docker emulator.
Otherwise, if you are not using the emulator, make sure your environment variables within `./dev/.env` are configured for your Google connection.
Finally, to start the Payload dev server with the GCS adapter, run `yarn dev:gcs` and then open `http://localhost:3000/admin` in your browser.

View File

@@ -0,0 +1 @@
export * from './dist/adapters/gcs'

View File

@@ -0,0 +1 @@
exports.gcsAdapter = require('./dist/adapters/gcs').gcsAdapter

View File

@@ -0,0 +1,62 @@
{
"name": "@payloadcms/plugin-cloud-storage",
"description": "The official cloud storage plugin for Payload CMS",
"version": "1.1.1",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",
"scripts": {
"build": "pnpm build:swc && pnpm build:types",
"build:swc": "swc ./src -d ./dist --config-file .swcrc",
"build:types": "tsc --emitDeclarationOnly --outDir dist",
"clean": "rimraf {dist,*.tsbuildinfo}",
"prepublishOnly": "yarn clean && yarn build"
},
"peerDependencies": {
"@aws-sdk/client-s3": "^3.142.0",
"@aws-sdk/lib-storage": "^3.267.0",
"@azure/storage-blob": "^12.11.0",
"@azure/abort-controller": "^1.0.0",
"@google-cloud/storage": "^6.4.1",
"payload": "^1.7.2 || ^2.0.0"
},
"peerDependenciesMeta": {
"@aws-sdk/client-s3": {
"optional": true
},
"@aws-sdk/lib-storage": {
"optional": true
},
"@azure/abort-controller": {
"optional": true
},
"@azure/storage-blob": {
"optional": true
},
"@google-cloud/storage": {
"optional": true
}
},
"files": [
"dist",
"*.js",
"*.d.ts"
],
"devDependencies": {
"@aws-sdk/client-s3": "^3.142.0",
"@aws-sdk/lib-storage": "^3.267.0",
"@azure/storage-blob": "^12.11.0",
"@google-cloud/storage": "^6.4.1",
"@types/express": "^4.17.9",
"cross-env": "^7.0.3",
"dotenv": "^8.2.0",
"nodemon": "^2.0.6",
"payload": "^1.7.2",
"rimraf": "^4.1.2",
"ts-node": "^9.1.1",
"webpack": "^5.78.0"
},
"dependencies": {
"range-parser": "^1.2.1"
}
}

packages/plugin-cloud-storage/s3.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
export * from './dist/adapters/s3'

View File

@@ -0,0 +1 @@
exports.s3Adapter = require('./dist/adapters/s3').s3Adapter

View File

@@ -0,0 +1,16 @@
version: '3'
services:
azure-storage:
image: mcr.microsoft.com/azure-storage/azurite:3.18.0
restart: always
command: 'azurite --loose --blobHost 0.0.0.0 --tableHost 0.0.0.0 --queueHost 0.0.0.0'
ports:
- '10000:10000'
- '10001:10001'
- '10002:10002'
volumes:
- ./azurestoragedata:/data"
volumes:
azurestoragedata:

View File

@@ -0,0 +1,14 @@
import path from 'path'
import type { GenerateURL } from '../../types'
interface Args {
baseURL: string
containerName: string
}
export const getGenerateURL =
({ baseURL, containerName }: Args): GenerateURL =>
({ filename, prefix = '' }) => {
return `${baseURL}/${containerName}/${path.posix.join(prefix, filename)}`
}

View File

@@ -0,0 +1,18 @@
import type { ContainerClient } from '@azure/storage-blob'
import type { CollectionConfig } from 'payload/types'
import path from 'path'
import type { HandleDelete } from '../../types'
interface Args {
collection: CollectionConfig
getStorageClient: () => ContainerClient
}
export const getHandleDelete = ({ getStorageClient }: Args): HandleDelete => {
return async ({ doc: { prefix = '' }, filename }) => {
const blockBlobClient = getStorageClient().getBlockBlobClient(path.posix.join(prefix, filename))
await blockBlobClient.deleteIfExists()
}
}

View File

@@ -0,0 +1,43 @@
import type { ContainerClient } from '@azure/storage-blob'
import type { CollectionConfig } from 'payload/types'
import { AbortController } from '@azure/abort-controller'
import fs from 'fs'
import path from 'path'
import { Readable } from 'stream'
import type { HandleUpload } from '../../types'
interface Args {
collection: CollectionConfig
getStorageClient: () => ContainerClient
prefix?: string
}
const multipartThreshold = 1024 * 1024 * 50 // 50MB
export const getHandleUpload = ({ getStorageClient, prefix = '' }: Args): HandleUpload => {
return async ({ data, file }) => {
const fileKey = path.posix.join(data.prefix || prefix, file.filename)
const blockBlobClient = getStorageClient().getBlockBlobClient(fileKey)
// when there are no temp files, or the upload is less than the threshold size, do not stream files
if (!file.tempFilePath && file.buffer.length > 0 && file.buffer.length < multipartThreshold) {
await blockBlobClient.upload(file.buffer, file.buffer.byteLength, {
blobHTTPHeaders: { blobContentType: file.mimeType },
})
return data
}
const fileBufferOrStream: Readable = file.tempFilePath
? fs.createReadStream(file.tempFilePath)
: Readable.from(file.buffer)
await blockBlobClient.uploadStream(fileBufferOrStream, 4 * 1024 * 1024, 4, {
abortSignal: AbortController.timeout(30 * 60 * 1000),
})
return data
}
}

View File

@@ -0,0 +1,51 @@
import type { ContainerClient } from '@azure/storage-blob'
import { BlobServiceClient } from '@azure/storage-blob'
import type { Adapter, GeneratedAdapter } from '../../types'
import { getGenerateURL } from './generateURL'
import { getHandleDelete } from './handleDelete'
import { getHandleUpload } from './handleUpload'
import { getHandler } from './staticHandler'
import { extendWebpackConfig } from './webpack'
export interface Args {
allowContainerCreate: boolean
baseURL: string
connectionString: string
containerName: string
}
export const azureBlobStorageAdapter = ({
allowContainerCreate,
baseURL,
connectionString,
containerName,
}: Args): Adapter => {
let storageClient: ContainerClient | null = null
const getStorageClient = () => {
if (storageClient) return storageClient
const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString)
return (storageClient = blobServiceClient.getContainerClient(containerName))
}
const createContainerIfNotExists = () => {
getStorageClient().createIfNotExists({ access: 'blob' })
}
return ({ collection, prefix }): GeneratedAdapter => {
return {
generateURL: getGenerateURL({ baseURL, containerName }),
handleDelete: getHandleDelete({ collection, getStorageClient }),
handleUpload: getHandleUpload({
collection,
getStorageClient,
prefix,
}),
staticHandler: getHandler({ collection, getStorageClient }),
webpack: extendWebpackConfig,
...(allowContainerCreate && { onInit: createContainerIfNotExists }),
}
}
}

View File

@@ -0,0 +1 @@
export const azureBlobStorageAdapter = () => ({})

View File

@@ -0,0 +1,41 @@
import type { ContainerClient } from '@azure/storage-blob'
import type { CollectionConfig } from 'payload/types'
import path from 'path'
import type { StaticHandler } from '../../types'
import { getFilePrefix } from '../../utilities/getFilePrefix'
import getRangeFromHeader from '../../utilities/getRangeFromHeader'
interface Args {
collection: CollectionConfig
getStorageClient: () => ContainerClient
}
export const getHandler = ({ collection, getStorageClient }: Args): StaticHandler => {
return async (req, res, next) => {
try {
const prefix = await getFilePrefix({ collection, req })
const blockBlobClient = getStorageClient().getBlockBlobClient(
path.posix.join(prefix, req.params.filename),
)
const { end, start } = await getRangeFromHeader(blockBlobClient, req.headers.range)
const blob = await blockBlobClient.download(start, end)
// eslint-disable-next-line no-underscore-dangle
const response = blob._response
res.header(response.headers.rawHeaders())
res.status(response.status)
if (blob?.readableStreamBody) {
return blob.readableStreamBody.pipe(res)
}
return next()
} catch (err: unknown) {
return next()
}
}
}

View File

@@ -0,0 +1,22 @@
import type { Configuration as WebpackConfig } from 'webpack'
import path from 'path'
export const extendWebpackConfig = (existingWebpackConfig: WebpackConfig): WebpackConfig => {
const newConfig: WebpackConfig = {
...existingWebpackConfig,
resolve: {
...(existingWebpackConfig.resolve || {}),
alias: {
...(existingWebpackConfig.resolve?.alias ? existingWebpackConfig.resolve.alias : {}),
'@payloadcms/plugin-cloud-storage/azure': path.resolve(__dirname, './mock.js'),
},
fallback: {
...(existingWebpackConfig.resolve?.fallback ? existingWebpackConfig.resolve.fallback : {}),
stream: false,
},
},
}
return newConfig
}

View File

@@ -0,0 +1,26 @@
version: '3'
services:
google-cloud-storage:
image: fsouza/fake-gcs-server
restart: always
command:
[
'-scheme',
'http',
'-port',
'4443',
'-public-host',
'http://localhost:4443',
'-external-url',
'http://localhost:4443',
'-backend',
'memory',
]
ports:
- '4443:4443'
volumes:
- ./google-cloud-storage/payload-bucket:/data/payload-bucket
volumes:
google-cloud-storage:

View File

@@ -0,0 +1,18 @@
import type { Storage } from '@google-cloud/storage'
import path from 'path'
import type { GenerateURL } from '../../types'
interface Args {
bucket: string
getStorageClient: () => Storage
}
export const getGenerateURL =
({ bucket, getStorageClient }: Args): GenerateURL =>
({ filename, prefix = '' }) => {
return decodeURIComponent(
getStorageClient().bucket(bucket).file(path.posix.join(prefix, filename)).publicUrl(),
)
}

View File

@@ -0,0 +1,18 @@
import type { Storage } from '@google-cloud/storage'
import path from 'path'
import type { HandleDelete } from '../../types'
interface Args {
bucket: string
getStorageClient: () => Storage
}
export const getHandleDelete = ({ bucket, getStorageClient }: Args): HandleDelete => {
return async ({ doc: { prefix = '' }, filename }) => {
await getStorageClient().bucket(bucket).file(path.posix.join(prefix, filename)).delete({
ignoreNotFound: true,
})
}
}

View File

@@ -0,0 +1,38 @@
import type { Storage } from '@google-cloud/storage'
import type { CollectionConfig } from 'payload/types'
import path from 'path'
import type { HandleUpload } from '../../types'
interface Args {
acl?: 'Private' | 'Public'
bucket: string
collection: CollectionConfig
getStorageClient: () => Storage
prefix?: string
}
export const getHandleUpload = ({
acl,
bucket,
getStorageClient,
prefix = '',
}: Args): HandleUpload => {
return async ({ data, file }) => {
const fileKey = path.posix.join(data.prefix || prefix, file.filename)
const gcsFile = getStorageClient().bucket(bucket).file(fileKey)
await gcsFile.save(file.buffer, {
metadata: {
contentType: file.mimeType,
},
})
if (acl) {
await gcsFile[`make${acl}`]()
}
return data
}
}

View File

@@ -0,0 +1,43 @@
import type { StorageOptions } from '@google-cloud/storage'
import { Storage } from '@google-cloud/storage'
import type { Adapter, GeneratedAdapter } from '../../types'
import { getGenerateURL } from './generateURL'
import { getHandleDelete } from './handleDelete'
import { getHandleUpload } from './handleUpload'
import { getHandler } from './staticHandler'
import { extendWebpackConfig } from './webpack'
export interface Args {
acl?: 'Private' | 'Public'
bucket: string
options: StorageOptions
}
export const gcsAdapter =
({ acl, bucket, options }: Args): Adapter =>
({ collection, prefix }): GeneratedAdapter => {
let storageClient: Storage | null = null
const getStorageClient = (): Storage => {
if (storageClient) return storageClient
storageClient = new Storage(options)
return storageClient
}
return {
generateURL: getGenerateURL({ bucket, getStorageClient }),
handleDelete: getHandleDelete({ bucket, getStorageClient }),
handleUpload: getHandleUpload({
acl,
bucket,
collection,
getStorageClient,
prefix,
}),
staticHandler: getHandler({ bucket, collection, getStorageClient }),
webpack: extendWebpackConfig,
}
}

View File

@@ -0,0 +1 @@
export const gcsAdapter = () => ({})

View File

@@ -0,0 +1,37 @@
import type { Storage } from '@google-cloud/storage'
import type { CollectionConfig } from 'payload/types'
import path from 'path'
import type { StaticHandler } from '../../types'
import { getFilePrefix } from '../../utilities/getFilePrefix'
interface Args {
bucket: string
collection: CollectionConfig
getStorageClient: () => Storage
}
export const getHandler = ({ bucket, collection, getStorageClient }: Args): StaticHandler => {
return async (req, res, next) => {
try {
const prefix = await getFilePrefix({ collection, req })
const file = getStorageClient()
.bucket(bucket)
.file(path.posix.join(prefix, req.params.filename))
const [metadata] = await file.getMetadata()
res.set({
'Content-Length': metadata.size,
'Content-Type': metadata.contentType,
ETag: metadata.etag,
})
return file.createReadStream().pipe(res)
} catch (err: unknown) {
return next()
}
}
}

View File

@@ -0,0 +1,22 @@
import type { Configuration as WebpackConfig } from 'webpack'
import path from 'path'
export const extendWebpackConfig = (existingWebpackConfig: WebpackConfig): WebpackConfig => {
const newConfig: WebpackConfig = {
...existingWebpackConfig,
resolve: {
...(existingWebpackConfig.resolve || {}),
alias: {
...(existingWebpackConfig.resolve?.alias ? existingWebpackConfig.resolve.alias : {}),
'@google-cloud/storage': path.resolve(__dirname, './mock.js'),
},
fallback: {
...(existingWebpackConfig.resolve?.fallback ? existingWebpackConfig.resolve.fallback : {}),
stream: false,
},
},
}
return newConfig
}

View File

@@ -0,0 +1,15 @@
version: '3.2'
services:
localstack:
image: localstack/localstack:latest
container_name: localstack_demo
ports:
- '4563-4599:4563-4599'
- '8055:8080'
environment:
- SERVICES=s3
- DEBUG=1
- DATA_DIR=/tmp/localstack/data
volumes:
- './.localstack:/var/lib/localstack'
- '/var/run/docker.sock:/var/run/docker.sock'

View File

@@ -0,0 +1,16 @@
import type * as AWS from '@aws-sdk/client-s3'
import path from 'path'
import type { GenerateURL } from '../../types'
interface Args {
bucket: string
config: AWS.S3ClientConfig
}
export const getGenerateURL =
({ bucket, config: { endpoint } }: Args): GenerateURL =>
({ filename, prefix = '' }) => {
return `${endpoint}/${bucket}/${path.posix.join(prefix, filename)}`
}

View File

@@ -0,0 +1,19 @@
import type * as AWS from '@aws-sdk/client-s3'
import path from 'path'
import type { HandleDelete } from '../../types'
interface Args {
bucket: string
getStorageClient: () => AWS.S3
}
export const getHandleDelete = ({ bucket, getStorageClient }: Args): HandleDelete => {
return async ({ doc: { prefix = '' }, filename }) => {
await getStorageClient().deleteObject({
Bucket: bucket,
Key: path.posix.join(prefix, filename),
})
}
}

View File

@@ -0,0 +1,63 @@
import type * as AWS from '@aws-sdk/client-s3'
import type { CollectionConfig } from 'payload/types'
import type stream from 'stream'
import { Upload } from '@aws-sdk/lib-storage'
import fs from 'fs'
import path from 'path'
import type { HandleUpload } from '../../types'
interface Args {
acl?: 'private' | 'public-read'
bucket: string
collection: CollectionConfig
getStorageClient: () => AWS.S3
prefix?: string
}
const multipartThreshold = 1024 * 1024 * 50 // 50MB
export const getHandleUpload = ({
acl,
bucket,
getStorageClient,
prefix = '',
}: Args): HandleUpload => {
return async ({ data, file }) => {
const fileKey = path.posix.join(data.prefix || prefix, file.filename)
const fileBufferOrStream: Buffer | stream.Readable = file.tempFilePath
? fs.createReadStream(file.tempFilePath)
: file.buffer
if (file.buffer.length > 0 && file.buffer.length < multipartThreshold) {
await getStorageClient().putObject({
ACL: acl,
Body: fileBufferOrStream,
Bucket: bucket,
ContentType: file.mimeType,
Key: fileKey,
})
return data
}
const parallelUploadS3 = new Upload({
client: getStorageClient(),
params: {
ACL: acl,
Body: fileBufferOrStream,
Bucket: bucket,
ContentType: file.mimeType,
Key: fileKey,
},
partSize: multipartThreshold,
queueSize: 4,
})
await parallelUploadS3.done()
return data
}
}

View File

@@ -0,0 +1,50 @@
import * as AWS from '@aws-sdk/client-s3'
import type { Adapter, GeneratedAdapter } from '../../types'
import { getGenerateURL } from './generateURL'
import { getHandleDelete } from './handleDelete'
import { getHandleUpload } from './handleUpload'
import { getHandler } from './staticHandler'
import { extendWebpackConfig } from './webpack'
export interface Args {
acl?: 'private' | 'public-read'
/**
* Bucket name to upload files to.
*
* Must follow [AWS S3 bucket naming conventions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html).
*/
bucket: string
/**
* AWS S3 client configuration. Highly dependent on your AWS setup.
*
* [AWS.S3ClientConfig Docs](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html)
*/
config: AWS.S3ClientConfig
}
export const s3Adapter =
({ acl, bucket, config = {} }: Args): Adapter =>
({ collection, prefix }): GeneratedAdapter => {
let storageClient: AWS.S3 | null = null
const getStorageClient: () => AWS.S3 = () => {
if (storageClient) return storageClient
storageClient = new AWS.S3(config)
return storageClient
}
return {
generateURL: getGenerateURL({ bucket, config }),
handleDelete: getHandleDelete({ bucket, getStorageClient }),
handleUpload: getHandleUpload({
acl,
bucket,
collection,
getStorageClient,
prefix,
}),
staticHandler: getHandler({ bucket, collection, getStorageClient }),
webpack: extendWebpackConfig,
}
}

View File

@@ -0,0 +1 @@
export const s3Adapter = () => ({})

View File

@@ -0,0 +1,44 @@
import type * as AWS from '@aws-sdk/client-s3'
import type { CollectionConfig } from 'payload/types'
import type { Readable } from 'stream'
import path from 'path'
import type { StaticHandler } from '../../types'
import { getFilePrefix } from '../../utilities/getFilePrefix'
interface Args {
bucket: string
collection: CollectionConfig
getStorageClient: () => AWS.S3
}
export const getHandler = ({ bucket, collection, getStorageClient }: Args): StaticHandler => {
return async (req, res, next) => {
try {
const prefix = await getFilePrefix({ collection, req })
const object = await getStorageClient().getObject({
Bucket: bucket,
Key: path.posix.join(prefix, req.params.filename),
})
res.set({
'Accept-Ranges': object.AcceptRanges,
'Content-Length': object.ContentLength,
'Content-Type': object.ContentType,
ETag: object.ETag,
})
if (object?.Body) {
return (object.Body as Readable).pipe(res)
}
return next()
} catch (err: unknown) {
req.payload.logger.error(err)
return next()
}
}
}

View File

@@ -0,0 +1,22 @@
import type { Configuration as WebpackConfig } from 'webpack'
import path from 'path'
export const extendWebpackConfig = (existingWebpackConfig: WebpackConfig): WebpackConfig => {
const newConfig: WebpackConfig = {
...existingWebpackConfig,
resolve: {
...(existingWebpackConfig.resolve || {}),
alias: {
...(existingWebpackConfig.resolve?.alias ? existingWebpackConfig.resolve.alias : {}),
'@payloadcms/plugin-cloud-storage/s3': path.resolve(__dirname, './mock.js'),
},
fallback: {
...(existingWebpackConfig.resolve?.fallback ? existingWebpackConfig.resolve.fallback : {}),
stream: false,
},
},
}
return newConfig
}

View File

@@ -0,0 +1,127 @@
import type { GroupField, TextField } from 'payload/dist/fields/config/types'
import type { CollectionConfig, Field } from 'payload/types'
import path from 'path'
interface Args {
collection: CollectionConfig
prefix?: string
}
export const getFields = ({ collection, prefix }: Args): Field[] => {
const baseURLField: Field = {
name: 'url',
admin: {
hidden: true,
readOnly: true,
},
label: 'URL',
type: 'text',
}
const basePrefixField: Field = {
name: 'prefix',
admin: {
hidden: true,
readOnly: true,
},
type: 'text',
}
const fields = [...collection.fields]
// Inject a hook into all URL fields to generate URLs
let existingURLFieldIndex = -1
const existingURLField = fields.find((existingField, i) => {
if ('name' in existingField && existingField.name === 'url') {
existingURLFieldIndex = i
return true
}
return false
}) as TextField
if (existingURLFieldIndex > -1) {
fields.splice(existingURLFieldIndex, 1)
}
fields.push({
...baseURLField,
...(existingURLField || {}),
})
if (typeof collection.upload === 'object' && collection.upload.imageSizes) {
let existingSizesFieldIndex = -1
const existingSizesField = fields.find((existingField, i) => {
if ('name' in existingField && existingField.name === 'sizes') {
existingSizesFieldIndex = i
return true
}
return false
}) as GroupField
if (existingSizesFieldIndex > -1) {
fields.splice(existingSizesFieldIndex, 1)
}
const sizesField: Field = {
...(existingSizesField || {}),
name: 'sizes',
admin: {
hidden: true,
},
fields: collection.upload.imageSizes.map((size) => {
const existingSizeField = existingSizesField?.fields.find(
(existingField) => 'name' in existingField && existingField.name === size.name,
) as GroupField
const existingSizeURLField = existingSizeField?.fields.find(
(existingField) => 'name' in existingField && existingField.name === 'url',
) as GroupField
return {
...existingSizeField,
name: size.name,
fields: [
{
...(existingSizeURLField || {}),
...baseURLField,
},
],
type: 'group',
}
}),
type: 'group',
}
fields.push(sizesField)
}
// If prefix is enabled, save it to db
if (typeof prefix !== 'undefined') {
let existingPrefixFieldIndex = -1
const existingPrefixField = fields.find((existingField, i) => {
if ('name' in existingField && existingField.name === 'prefix') {
existingPrefixFieldIndex = i
return true
}
return false
}) as TextField
if (existingPrefixFieldIndex > -1) {
fields.splice(existingPrefixFieldIndex, 1)
}
fields.push({
...basePrefixField,
...(existingPrefixField || {}),
defaultValue: path.posix.join(prefix),
})
}
return fields
}

View File

@@ -0,0 +1,41 @@
import type { Config } from 'payload/config'
import type { PluginOptions } from '../types'
import { getFields } from './fields/getFields'
// This is the admin cloud-storage plugin stub file.
// It only extends the parts of the config that are required by the admin UI.
export const cloudStorage =
(pluginOptions: PluginOptions) =>
(incomingConfig: Config): Config => {
const { collections: allCollectionOptions, enabled } = pluginOptions
const config = { ...incomingConfig }
// Return the config untouched if the plugin is disabled
if (enabled === false) {
return config
}
return {
...config,
collections: (config.collections || []).map((existingCollection) => {
const options = allCollectionOptions[existingCollection.slug]
if (options?.adapter) {
const fields = getFields({
collection: existingCollection,
prefix: options.prefix,
})
return {
...existingCollection,
fields,
}
}
return existingCollection
}),
}
}

View File

@@ -0,0 +1,158 @@
import type { GroupField, TextField } from 'payload/dist/fields/config/types'
import type { CollectionConfig, Field } from 'payload/types'
import path from 'path'
import type { GenerateFileURL, GeneratedAdapter } from '../types'
import { getAfterReadHook } from '../hooks/afterRead'
interface Args {
adapter: GeneratedAdapter
collection: CollectionConfig
disablePayloadAccessControl?: true
generateFileURL?: GenerateFileURL
prefix?: string
}
export const getFields = ({
adapter,
collection,
disablePayloadAccessControl,
generateFileURL,
prefix,
}: Args): Field[] => {
const baseURLField: Field = {
name: 'url',
admin: {
hidden: true,
readOnly: true,
},
label: 'URL',
type: 'text',
}
const basePrefixField: Field = {
name: 'prefix',
admin: {
hidden: true,
readOnly: true,
},
type: 'text',
}
const fields = [...collection.fields]
// Inject a hook into all URL fields to generate URLs
let existingURLFieldIndex = -1
const existingURLField = fields.find((existingField, i) => {
if ('name' in existingField && existingField.name === 'url') {
existingURLFieldIndex = i
return true
}
return false
}) as TextField
if (existingURLFieldIndex > -1) {
fields.splice(existingURLFieldIndex, 1)
}
fields.push({
...baseURLField,
...(existingURLField || {}),
hooks: {
afterRead: [
getAfterReadHook({ adapter, collection, disablePayloadAccessControl, generateFileURL }),
...(existingURLField?.hooks?.afterRead || []),
],
},
})
if (typeof collection.upload === 'object' && collection.upload.imageSizes) {
let existingSizesFieldIndex = -1
const existingSizesField = fields.find((existingField, i) => {
if ('name' in existingField && existingField.name === 'sizes') {
existingSizesFieldIndex = i
return true
}
return false
}) as GroupField
if (existingSizesFieldIndex > -1) {
fields.splice(existingSizesFieldIndex, 1)
}
const sizesField: Field = {
...(existingSizesField || {}),
name: 'sizes',
admin: {
hidden: true,
},
fields: collection.upload.imageSizes.map((size) => {
const existingSizeField = existingSizesField?.fields.find(
(existingField) => 'name' in existingField && existingField.name === size.name,
) as GroupField
const existingSizeURLField = existingSizeField?.fields.find(
(existingField) => 'name' in existingField && existingField.name === 'url',
) as GroupField
return {
...existingSizeField,
name: size.name,
fields: [
{
...(existingSizeURLField || {}),
...baseURLField,
hooks: {
afterRead: [
getAfterReadHook({
adapter,
collection,
disablePayloadAccessControl,
generateFileURL,
size,
}),
...(existingSizeURLField?.hooks?.afterRead || []),
],
},
},
],
type: 'group',
}
}),
type: 'group',
}
fields.push(sizesField)
}
// If prefix is enabled, save it to db
if (typeof prefix !== 'undefined') {
let existingPrefixFieldIndex = -1
const existingPrefixField = fields.find((existingField, i) => {
if ('name' in existingField && existingField.name === 'prefix') {
existingPrefixFieldIndex = i
return true
}
return false
}) as TextField
if (existingPrefixFieldIndex > -1) {
fields.splice(existingPrefixFieldIndex, 1)
}
fields.push({
...basePrefixField,
...(existingPrefixField || {}),
defaultValue: path.posix.join(prefix),
})
}
return fields
}

View File

@@ -0,0 +1,36 @@
import type { TypeWithID } from 'payload/dist/globals/config/types'
import type { FileData } from 'payload/dist/uploads/types'
import type { CollectionAfterDeleteHook, CollectionConfig } from 'payload/types'
import type { GeneratedAdapter, TypeWithPrefix } from '../types'
interface Args {
adapter: GeneratedAdapter
collection: CollectionConfig
}
export const getAfterDeleteHook = ({
adapter,
collection,
}: Args): CollectionAfterDeleteHook<FileData & TypeWithID & TypeWithPrefix> => {
return async ({ doc, req }) => {
try {
const filesToDelete: string[] = [
doc.filename,
...Object.values(doc?.sizes || []).map((resizedFileData) => resizedFileData?.filename),
]
const promises = filesToDelete.map(async (filename) => {
if (filename) await adapter.handleDelete({ collection, doc, filename, req })
})
await Promise.all(promises)
} catch (err: unknown) {
req.payload.logger.error(
`There was an error while deleting files corresponding to the ${collection.labels?.singular} with ID ${doc.id}:`,
)
req.payload.logger.error(err)
}
return doc
}
}

View File

@@ -0,0 +1,39 @@
import type { ImageSize } from 'payload/dist/uploads/types'
import type { CollectionConfig, FieldHook } from 'payload/types'
import type { GenerateFileURL, GeneratedAdapter } from '../types'
interface Args {
adapter: GeneratedAdapter
collection: CollectionConfig
disablePayloadAccessControl?: boolean
generateFileURL?: GenerateFileURL
size?: ImageSize
}
export const getAfterReadHook =
({ adapter, collection, disablePayloadAccessControl, generateFileURL, size }: Args): FieldHook =>
async ({ data, value }) => {
const filename = size ? data?.sizes?.[size.name]?.filename : data?.filename
const prefix = data?.prefix
let url = value
if (disablePayloadAccessControl && filename) {
url = await adapter.generateURL({
collection,
filename,
prefix,
})
}
if (generateFileURL) {
url = await generateFileURL({
collection,
filename,
prefix,
size,
})
}
return url
}

View File

@@ -0,0 +1,61 @@
import type { TypeWithID } from 'payload/dist/collections/config/types'
import type { FileData } from 'payload/dist/uploads/types'
import type { CollectionBeforeChangeHook, CollectionConfig } from 'payload/types'
import type { GeneratedAdapter } from '../types'
import { getIncomingFiles } from '../utilities/getIncomingFiles'
interface Args {
adapter: GeneratedAdapter
collection: CollectionConfig
}
export const getBeforeChangeHook =
({ adapter, collection }: Args): CollectionBeforeChangeHook<FileData & TypeWithID> =>
async ({ data, originalDoc, req }) => {
try {
const files = getIncomingFiles({ data, req })
if (files.length > 0) {
// If there is an original doc,
// And we have new files,
// We need to delete the old files before uploading the new ones
if (originalDoc) {
let filesToDelete: string[] = []
if (typeof originalDoc?.filename === 'string') {
filesToDelete.push(originalDoc.filename)
}
if (typeof originalDoc.sizes === 'object') {
filesToDelete = filesToDelete.concat(
Object.values(originalDoc?.sizes || []).map(
(resizedFileData) => resizedFileData?.filename,
),
)
}
const deletionPromises = filesToDelete.map(async (filename) => {
if (filename) {
await adapter.handleDelete({ collection, doc: originalDoc, filename, req })
}
})
await Promise.all(deletionPromises)
}
const promises = files.map(async (file) => {
await adapter.handleUpload({ collection, data, file, req })
})
await Promise.all(promises)
}
} catch (err: unknown) {
req.payload.logger.error(
`There was an error while uploading files corresponding to the collection ${collection.slug} with filename ${data.filename}:`,
)
req.payload.logger.error(err)
}
return data
}

View File

@@ -0,0 +1 @@
export { cloudStorage } from './plugin'

View File

@@ -0,0 +1,103 @@
import type { Config } from 'payload/config'
import type { PluginOptions } from './types'
import { getFields } from './fields/getFields'
import { getAfterDeleteHook } from './hooks/afterDelete'
import { getBeforeChangeHook } from './hooks/beforeChange'
import { extendWebpackConfig } from './webpack'
// This plugin extends all targeted collections by offloading uploaded files
// to cloud storage instead of solely storing files locally.
// It is based on an adapter approach, where adapters can be written for any cloud provider.
// Adapters are responsible for providing four actions that this plugin will use:
// 1. handleUpload, 2. handleDelete, 3. generateURL, 4. staticHandler
// Optionally, the adapter can specify any Webpack config overrides if they are necessary.
export const cloudStorage =
(pluginOptions: PluginOptions) =>
(incomingConfig: Config): Config => {
const { collections: allCollectionOptions, enabled } = pluginOptions
const config = { ...incomingConfig }
const webpack = extendWebpackConfig({ config: incomingConfig, options: pluginOptions })
config.admin = {
...(config.admin || {}),
webpack,
}
// Return early if the plugin is disabled. Only the webpack config modifications above are applied.
if (enabled === false) {
return config
}
const initFunctions: Array<() => void> = []
return {
...config,
collections: (config.collections || []).map((existingCollection) => {
const options = allCollectionOptions[existingCollection.slug]
if (options?.adapter) {
const adapter = options.adapter({
collection: existingCollection,
prefix: options.prefix,
})
if (adapter.onInit) initFunctions.push(adapter.onInit)
const fields = getFields({
adapter,
collection: existingCollection,
disablePayloadAccessControl: options.disablePayloadAccessControl,
generateFileURL: options.generateFileURL,
prefix: options.prefix,
})
const handlers = [
...(typeof existingCollection.upload === 'object' &&
Array.isArray(existingCollection.upload.handlers)
? existingCollection.upload.handlers
: []),
]
if (!options.disablePayloadAccessControl) {
handlers.push(adapter.staticHandler)
}
return {
...existingCollection,
fields,
hooks: {
...(existingCollection.hooks || {}),
afterDelete: [
...(existingCollection.hooks?.afterDelete || []),
getAfterDeleteHook({ adapter, collection: existingCollection }),
],
beforeChange: [
...(existingCollection.hooks?.beforeChange || []),
getBeforeChangeHook({ adapter, collection: existingCollection }),
],
},
upload: {
...(typeof existingCollection.upload === 'object' ? existingCollection.upload : {}),
disableLocalStorage:
typeof options.disableLocalStorage === 'boolean'
? options.disableLocalStorage
: true,
handlers,
},
}
}
return existingCollection
}),
onInit: async (payload) => {
initFunctions.forEach((fn) => fn())
if (config.onInit) await config.onInit(payload)
},
}
}
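
For reference, wiring the plugin into a Payload config looks roughly like this; myAdapter is a placeholder for any factory matching the Adapter type defined in the types file below:

import { buildConfig } from 'payload/config'
import { cloudStorage } from '@payloadcms/plugin-cloud-storage'
// Placeholder: any factory matching the Adapter type works here.
import { myAdapter } from './myAdapter'

export default buildConfig({
  collections: [{ slug: 'media', upload: true, fields: [] }],
  plugins: [
    cloudStorage({
      collections: {
        media: {
          adapter: myAdapter,
          prefix: 'media', // optional key prefix for stored files
        },
      },
    }),
  ],
})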

View File

@@ -0,0 +1,79 @@
import type { NextFunction, Response } from 'express'
import type { TypeWithID } from 'payload/dist/collections/config/types'
import type { FileData, ImageSize } from 'payload/dist/uploads/types'
import type { CollectionConfig, PayloadRequest } from 'payload/types'
import type { Configuration as WebpackConfig } from 'webpack'
export interface File {
buffer: Buffer
filename: string
filesize: number
mimeType: string
tempFilePath?: string
}
export type HandleUpload = (args: {
collection: CollectionConfig
data: any
file: File
req: PayloadRequest
}) => Promise<void> | void
export interface TypeWithPrefix {
prefix?: string
}
export type HandleDelete = (args: {
collection: CollectionConfig
doc: TypeWithID & FileData & TypeWithPrefix
filename: string
req: PayloadRequest
}) => Promise<void> | void
export type GenerateURL = (args: {
collection: CollectionConfig
filename: string
prefix?: string
}) => Promise<string> | string
export type StaticHandler = (
req: PayloadRequest,
res: Response,
next: NextFunction,
) => Promise<unknown> | unknown
export interface GeneratedAdapter {
generateURL: GenerateURL
handleDelete: HandleDelete
handleUpload: HandleUpload
onInit?: () => void
staticHandler: StaticHandler
webpack?: (config: WebpackConfig) => WebpackConfig
}
export type Adapter = (args: { collection: CollectionConfig; prefix?: string }) => GeneratedAdapter
export type GenerateFileURL = (args: {
collection: CollectionConfig
filename: string
prefix?: string
size?: ImageSize
}) => Promise<string> | string
export interface CollectionOptions {
adapter: Adapter | null
disableLocalStorage?: boolean
disablePayloadAccessControl?: true
generateFileURL?: GenerateFileURL
prefix?: string
}
export interface PluginOptions {
collections: Record<string, CollectionOptions>
/**
* Whether or not to enable the plugin
*
* Default: true
*/
enabled?: boolean
}
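
A GeneratedAdapter can be as small as the four required actions. A local-disk sketch, useful mainly to illustrate the contract (the import path for the types is assumed):

import fs from 'fs/promises'
import path from 'path'
import type { Adapter } from '@payloadcms/plugin-cloud-storage/types'

export const diskAdapter =
  (rootDir: string): Adapter =>
  ({ prefix = '' }) => ({
    handleUpload: async ({ file }) => {
      const dest = path.join(rootDir, prefix, file.filename)
      await fs.mkdir(path.dirname(dest), { recursive: true })
      await fs.writeFile(dest, file.buffer)
    },
    handleDelete: async ({ filename }) => {
      await fs.unlink(path.join(rootDir, prefix, filename))
    },
    generateURL: ({ filename, prefix: docPrefix }) =>
      `/${[docPrefix || prefix, filename].filter(Boolean).join('/')}`,
    staticHandler: async (req, res, next) => {
      try {
        res.send(await fs.readFile(path.join(rootDir, prefix, req.params.filename)))
      } catch {
        next()
      }
    },
  })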

View File

@@ -0,0 +1,26 @@
import type { IncomingUploadType } from 'payload/dist/uploads/types'
import type { CollectionConfig, PayloadRequest } from 'payload/types'
export async function getFilePrefix({
collection,
req,
}: {
collection: CollectionConfig
req: PayloadRequest
}): Promise<string> {
const imageSizes = (collection?.upload as IncomingUploadType)?.imageSizes || []
const files = await req.payload.find({
collection: collection.slug,
where: {
or: [
{
filename: { equals: req.params.filename },
},
...imageSizes.map((imageSize) => ({
[`sizes.${imageSize.name}.filename`]: { equals: req.params.filename },
})),
],
},
})
return files?.docs?.[0]?.prefix || ''
}
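
Adapters typically call getFilePrefix at the top of their staticHandler so that prefixed files resolve to the correct storage key. A sketch with a hypothetical download client:

import path from 'path'
import type { CollectionConfig } from 'payload/types'
import type { StaticHandler } from '@payloadcms/plugin-cloud-storage/types'
import { getFilePrefix } from './getFilePrefix'

// Hypothetical: stands in for whatever read API the storage SDK exposes.
declare const client: { download: (key: string) => Promise<Buffer> }

export const getStaticHandler =
  (collection: CollectionConfig): StaticHandler =>
  async (req, res, next) => {
    try {
      const prefix = await getFilePrefix({ collection, req })
      res.send(await client.download(path.posix.join(prefix, req.params.filename)))
    } catch {
      next()
    }
  }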

View File

@@ -0,0 +1,45 @@
import type { FileData } from 'payload/dist/uploads/types'
import type { PayloadRequest } from 'payload/types'
import type { File } from '../types'
export function getIncomingFiles({
data,
req,
}: {
data: Partial<FileData>
req: PayloadRequest
}): File[] {
const file = req.files?.file
let files: File[] = []
if (file && data.filename && data.mimeType) {
const mainFile: File = {
buffer: file.data,
filename: data.filename,
filesize: file.size,
mimeType: data.mimeType,
tempFilePath: file.tempFilePath,
}
files = [mainFile]
if (data?.sizes) {
Object.entries(data.sizes).forEach(([key, resizedFileData]) => {
if (req.payloadUploadSizes?.[key] && data.mimeType) {
files = files.concat([
{
buffer: req.payloadUploadSizes[key],
filename: `${resizedFileData.filename}`,
filesize: req.payloadUploadSizes[key].length,
mimeType: data.mimeType,
},
])
}
})
}
}
return files
}
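
An illustrative call, showing the two sources the helper reads: req.files.file for the original upload and req.payloadUploadSizes for each resize Payload generated (all values below are made up):

import type { PayloadRequest } from 'payload/types'
import { getIncomingFiles } from './getIncomingFiles'

// Contrived request/data, cast loosely for the sake of the example.
const req = {
  files: { file: { data: Buffer.from('original'), size: 8 } },
  payloadUploadSizes: { thumbnail: Buffer.from('resized') },
} as unknown as PayloadRequest

const data = {
  filename: 'photo.png',
  mimeType: 'image/png',
  sizes: { thumbnail: { filename: 'photo-400x300.png' } },
} as any

console.log(getIncomingFiles({ data, req }).map((f) => f.filename))
// -> [ 'photo.png', 'photo-400x300.png' ]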

View File

@@ -0,0 +1,28 @@
import type { BlockBlobClient } from '@azure/storage-blob'
import parseRange from 'range-parser'
const getRangeFromHeader = async (
blockBlobClient: BlockBlobClient,
rangeHeader?: string,
): Promise<{ end: number | undefined; start: number }> => {
const fullRange = { end: undefined, start: 0 }
if (!rangeHeader) {
return fullRange
}
const size = await blockBlobClient.getProperties().then((props) => props.contentLength)
if (size === undefined) {
return fullRange
}
const range = parseRange(size, rangeHeader)
if (range === -1 || range === -2 || range.type !== 'bytes' || range.length !== 1) {
return fullRange
}
return range[0]
}
export default getRangeFromHeader
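
This helper lets Azure downloads honor HTTP Range requests (e.g. audio/video scrubbing). A sketch of how a static handler might consume it, following the Azure SDK's (offset, count) download signature:

import type { BlockBlobClient } from '@azure/storage-blob'
import getRangeFromHeader from './getRangeFromHeader'

const downloadWithRange = async (client: BlockBlobClient, rangeHeader?: string) => {
  const { end, start } = await getRangeFromHeader(client, rangeHeader)
  // `end` is inclusive, so the byte count spans both endpoints.
  const count = end !== undefined ? end - start + 1 : undefined
  return client.download(start, count)
}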

View File

@@ -0,0 +1,50 @@
import type { Config } from 'payload/config'
import type { Configuration as WebpackConfig } from 'webpack'
import path from 'path'
import type { GeneratedAdapter, PluginOptions } from './types'
interface Args {
config: Config
options: PluginOptions
}
export const extendWebpackConfig =
({ config, options }: Args): ((webpackConfig: WebpackConfig) => WebpackConfig) =>
(webpackConfig) => {
const existingWebpackConfig =
typeof config.admin?.webpack === 'function'
? config.admin.webpack(webpackConfig)
: webpackConfig
const newConfig: WebpackConfig = {
...existingWebpackConfig,
resolve: {
...(existingWebpackConfig.resolve || {}),
alias: {
...(existingWebpackConfig.resolve?.alias ? existingWebpackConfig.resolve.alias : {}),
'@payloadcms/plugin-cloud-storage$': path.resolve(__dirname, './admin/index.js'),
},
},
}
return Object.entries(options.collections).reduce(
(resultingWebpackConfig, [slug, collectionOptions]) => {
const matchedCollection = config.collections?.find((coll) => coll.slug === slug)
if (matchedCollection && typeof collectionOptions.adapter === 'function') {
const adapter: GeneratedAdapter = collectionOptions.adapter({
collection: matchedCollection,
})
if (adapter.webpack) {
return adapter.webpack(resultingWebpackConfig)
}
}
return resultingWebpackConfig
},
newConfig,
)
}
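
A per-adapter webpack override usually aliases the adapter's server-only SDK to an empty browser mock so the admin bundle can still build. A sketch (the mock path is illustrative):

import path from 'path'
import type { Configuration as WebpackConfig } from 'webpack'

export const webpack = (config: WebpackConfig): WebpackConfig => ({
  ...config,
  resolve: {
    ...(config.resolve || {}),
    alias: {
      ...(config.resolve?.alias || {}),
      // Keep the server-only SDK out of the admin bundle.
      '@azure/storage-blob': path.resolve(__dirname, './mocks/emptyModule.js'),
    },
  },
})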

View File

@@ -0,0 +1,24 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true, // Make sure typescript knows that this module depends on their references
"noEmit": false /* Do not emit outputs. */,
"emitDeclarationOnly": true,
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
"rootDir": "./src" /* Specify the root folder within your source files. */
},
"exclude": [
"dist",
"build",
"tests",
"test",
"node_modules",
".eslintrc.js",
"src/**/*.spec.js",
"src/**/*.spec.jsx",
"src/**/*.spec.ts",
"src/**/*.spec.tsx"
],
"include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.d.ts", "src/**/*.json"],
"references": [{ "path": "../payload" }]
}

825 pnpm-lock.yaml generated

File diff suppressed because it is too large