Compare commits

..

19 Commits

Author SHA1 Message Date
Elliot DeNolf
c73113565a 1.1.0-beta.1 2023-10-06 09:41:41 -04:00
James
ad924c8d7b Merge branch 'master' of github.com:payloadcms/plugin-cloud-storage 2023-10-06 09:02:15 -04:00
James
bb6956cd32 chore: plugin alias specificity 2023-10-06 09:02:08 -04:00
Elliot DeNolf
5c97d05acf 1.1.0-beta.0 2023-10-05 14:15:02 -04:00
James
6bad383a95 chore: working local dev aliases 2023-10-05 13:28:22 -04:00
James
4b726eb139 chore: improves alias pattern 2023-10-05 12:35:14 -04:00
Elliot DeNolf
3d5858ca6d 1.0.19 2023-06-26 14:23:23 -04:00
James Mikrut
5456695728 Merge pull request #62 from lucyawrey/individual-prefixes
Add support for 'prefix' field on individual uploads, takes priority over global prefix field
2023-06-26 14:22:04 -04:00
Lucy Awrey
b79da5920a Add support for 'prefix' field on individual uploads, takes priority over global prefix field
squashed a few commits testing different approaches to this
2023-06-26 13:18:36 -04:00
Elliot DeNolf
a0a92952eb 1.0.18 2023-06-23 17:13:57 -04:00
Elliot DeNolf
a687dfdb16 docs: arg comments and dev readme formatting 2023-06-23 17:13:10 -04:00
Elliot DeNolf
b9d7e82052 chore: add region aws adapter example in readme 2023-06-23 14:42:15 -04:00
TomDo1234
fb10af8365 docs: mention usage with AWS EC2 IAM Role (#30)
Specified that you don't need to provide any credentials when using a correct IAM Role. IAM Roles are recommended by AWS over direct credentials due to superior security.
2023-06-23 11:40:08 -04:00
Seth Syberg
53427443a7 fix: allow empty config on s3 adapter (#60) 2023-06-23 11:39:04 -04:00
vvvctr
62fae5520d chore: fix email link (#44)
Fix mail link in readme.
2023-06-23 11:33:23 -04:00
DireWolf707
8b186dbf83 docs: add default value for GCS_CREDENTIALS (#48)
without default value, it gives error in payload admin page (in console of browser)

caught SyntaxError: "undefined" is not valid JSON
    at JSON.parse (<anonymous>)
    at ./src/payload.config.ts 

as envs are not available in payload admin GCS_CREDENTIALS gives undefined
resulting JSON.parse(undefined) raises this error
2023-06-23 11:32:02 -04:00
Elliot DeNolf
4ede3384f0 1.0.17 2023-06-12 10:54:34 -04:00
James
297e7f8c1d 1.0.16 2023-04-27 16:38:21 -04:00
James
5c9a01aa1c 1.0.15 2023-04-27 16:37:34 -04:00
21 changed files with 222 additions and 54 deletions

View File

@@ -122,7 +122,8 @@ const adapter = s3Adapter({
credentials: {
accessKeyId: process.env.S3_ACCESS_KEY_ID,
secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
}
},
region: process.env.S3_REGION,
// ... Other S3 configuration
},
bucket: process.env.S3_BUCKET,
@@ -130,6 +131,7 @@ const adapter = s3Adapter({
// Now you can pass this adapter to the plugin
```
Note that the credentials option does not have to be used when you are using PayloadCMS on an EC2 instance that has been configured with an IAM Role with necessary permissions.
Other S3 Client configuration is documented [here](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html).
@@ -165,7 +167,7 @@ const adapter = gcsAdapter({
// you can choose any method for authentication, and authorization which is being provided by `@google-cloud/storage`
keyFilename: './gcs-credentials.json',
//OR
credentials: JSON.parse(process.env.GCS_CREDENTIALS) // this env variable will have stringify version of your credentials.json file
credentials: JSON.parse(process.env.GCS_CREDENTIALS || "{}") // this env variable will have stringify version of your credentials.json file
},
bucket: process.env.GCS_BUCKET,
})
@@ -189,7 +191,7 @@ For instructions regarding how to develop with this plugin locally, [click here]
## Questions
Please contact [Payload](dev@payloadcms.com) with any questions about using this plugin.
Please contact [Payload](mailto:dev@payloadcms.com) with any questions about using this plugin.
## Credit

View File

@@ -69,10 +69,22 @@ export default buildConfig({
alias: {
...(config.resolve.alias || {}),
react: path.resolve(__dirname, '../node_modules/react'),
'@azure/storage-blob': path.resolve(__dirname, '../../src/adapters/azure/mock.js'),
'@aws-sdk/client-s3': path.resolve(__dirname, '../../src/adapters/s3/mock.js'),
'@google-cloud/storage': path.resolve(__dirname, '../../src/adapters/gcs/mock.js'),
fs: path.resolve(__dirname, '../../src/adapters/s3/fsMock.js'),
[path.resolve(__dirname, '../../src/index')]: path.resolve(
__dirname,
'../../src/admin/index.ts',
),
[path.resolve(__dirname, '../../src/adapters/s3/index')]: path.resolve(
__dirname,
'../../src/adapters/s3/mock.js',
),
[path.resolve(__dirname, '../../src/adapters/gcs/index')]: path.resolve(
__dirname,
'../../src/adapters/gcs/mock.js',
),
[path.resolve(__dirname, '../../src/adapters/azure/index')]: path.resolve(
__dirname,
'../../src/adapters/azure/mock.js',
),
},
},
}

View File

@@ -8,7 +8,7 @@ This repository includes a local development environment for local testing and d
1. `cd` into `./dev` and run `cp .env.example .env` to create an `.env` file
1. Open your newly created `./dev/.env` file and _completely_ fill out each property
### Azure Adapter Development
## Azure Adapter Development
This repository comes with a Docker emulator for Azure Blob Storage.
@@ -20,7 +20,7 @@ Otherwise, if you are not using the emulator, make sure your environment variabl
Finally, to start the Payload dev server with the Azure adapter, run `yarn dev:azure` and then open `http://localhost:3000/admin` in your browser.
### S3 Adapter Development
## S3 Adapter Development
This repository also includes a Docker LocalStack emulator for S3. It requires a few more steps to get up and running.
@@ -34,7 +34,7 @@ To use the S3 emulator, use the following steps:
Finally, you can run `yarn dev:s3` and then open `http://localhost:3000/admin` in your browser.
### Google Cloud Storage(GCS) Adapter Development
## Google Cloud Storage (GCS) Adapter Development
This repository comes with a Docker emulator for Google Cloud Storage.

View File

@@ -1,7 +1,7 @@
{
"name": "@payloadcms/plugin-cloud-storage",
"description": "The official cloud storage plugin for Payload CMS",
"version": "1.0.17-beta.1",
"version": "1.1.0-beta.1",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"license": "MIT",

View File

@@ -1 +0,0 @@
export default 'file-stub'

View File

@@ -15,8 +15,10 @@ interface Args {
const multipartThreshold = 1024 * 1024 * 50 // 50MB
export const getHandleUpload = ({ getStorageClient, prefix = '' }: Args): HandleUpload => {
return async ({ data, file }) => {
const fileKey = path.posix.join(data.prefix || prefix, file.filename)
const blockBlobClient = getStorageClient().getBlockBlobClient(
path.posix.join(prefix, file.filename),
fileKey,
)
// when there are no temp files, or the upload is less than the threshold size, do not stream files

View File

@@ -1,13 +1 @@
exports.BlobServiceClient = {
fromConnectionString: () => ({
getContainerClient: () => ({
createIfNotExists: () => null,
}),
}),
}
exports.AbortController = {
timeout: () => null,
}
exports.Readable = { from: () => null }
export const azureBlobStorageAdapter = () => ({})

View File

@@ -12,10 +12,7 @@ export const extendWebpackConfig = (existingWebpackConfig: WebpackConfig): Webpa
},
alias: {
...(existingWebpackConfig.resolve?.alias ? existingWebpackConfig.resolve.alias : {}),
stream: path.resolve(__dirname, './mock.js'),
'@azure/storage-blob': path.resolve(__dirname, './mock.js'),
'@azure/abort-controller': path.resolve(__dirname, './mock.js'),
fs: path.resolve(__dirname, './fileStub.js'),
'@payloadcms/plugin-cloud-storage/azure': path.resolve(__dirname, './mock.js'),
},
},
}

View File

@@ -18,7 +18,9 @@ export const getHandleUpload = ({
prefix = '',
}: Args): HandleUpload => {
return async ({ data, file }) => {
const gcsFile = getStorageClient().bucket(bucket).file(path.posix.join(prefix, file.filename))
const fileKey = path.posix.join(data.prefix || prefix, file.filename)
const gcsFile = getStorageClient().bucket(bucket).file(fileKey)
await gcsFile.save(file.buffer, {
metadata: {
contentType: file.mimeType,

View File

@@ -1,4 +1,5 @@
import { Storage, StorageOptions } from '@google-cloud/storage'
import type { StorageOptions } from '@google-cloud/storage'
import { Storage } from '@google-cloud/storage'
import type { Adapter, GeneratedAdapter } from '../../types'
import { getGenerateURL } from './generateURL'
import { getHandler } from './staticHandler'
@@ -16,9 +17,11 @@ export const gcsAdapter =
({ options, bucket, acl }: Args): Adapter =>
({ collection, prefix }): GeneratedAdapter => {
let storageClient: Storage | null = null
const getStorageClient = () => {
const getStorageClient = (): Storage => {
if (storageClient) return storageClient
return (storageClient = new Storage(options))
storageClient = new Storage(options)
return storageClient
}
return {

View File

@@ -1,3 +1 @@
exports.Storage = function () {
return null
}
export const gcsAdapter = () => ({})

View File

@@ -1 +0,0 @@
export default 'file-stub'

View File

@@ -1 +0,0 @@
module.exports = { fs: { createReadStream: () => null } }

View File

@@ -24,7 +24,7 @@ export const getHandleUpload = ({
prefix = '',
}: Args): HandleUpload => {
return async ({ data, file }) => {
const fileKey = path.posix.join(prefix, file.filename)
const fileKey = path.posix.join(data.prefix || prefix, file.filename)
const fileBufferOrStream: Buffer | stream.Readable = file.tempFilePath
? fs.createReadStream(file.tempFilePath)

View File

@@ -7,13 +7,23 @@ import { getHandleUpload } from './handleUpload'
import { extendWebpackConfig } from './webpack'
export interface Args {
/**
* AWS S3 client configuration. Highly dependent on your AWS setup.
*
* [AWS.S3ClientConfig Docs](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html)
*/
config: AWS.S3ClientConfig
/**
* Bucket name to upload files to.
*
* Must follow [AWS S3 bucket naming conventions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html).
*/
bucket: string
acl?: 'private' | 'public-read'
}
export const s3Adapter =
({ config, bucket, acl }: Args): Adapter =>
({ config = {}, bucket, acl }: Args): Adapter =>
({ collection, prefix }): GeneratedAdapter => {
let storageClient: AWS.S3 | null = null
const getStorageClient: () => AWS.S3 = () => {

View File

@@ -1,9 +1 @@
exports.S3 = () => null
exports.Upload = () => null
exports.HeadObjectCommand = () => null
exports.PutObjectCommand = () => null
exports.UploadPartCommand = () => null
exports.CreateMultipartUploadCommand = () => null
exports.CompleteMultipartUploadCommand = () => null
exports.PutObjectTaggingCommand = () => null
export const s3Adapter = () => ({})

View File

@@ -12,9 +12,7 @@ export const extendWebpackConfig = (existingWebpackConfig: WebpackConfig): Webpa
},
alias: {
...(existingWebpackConfig.resolve?.alias ? existingWebpackConfig.resolve.alias : {}),
'@aws-sdk/client-s3': path.resolve(__dirname, './mock.js'),
'@aws-sdk/lib-storage': path.resolve(__dirname, './mock.js'),
fs: path.resolve(__dirname, './fsMock.js'),
'@payloadcms/plugin-cloud-storage/s3': path.resolve(__dirname, './mock.js'),
},
},
}

View File

@@ -0,0 +1,126 @@
import path from 'path'
import type { GroupField, TextField } from 'payload/dist/fields/config/types'
import type { CollectionConfig, Field } from 'payload/types'
interface Args {
collection: CollectionConfig
prefix?: string
}
/**
 * Builds the field list for an upload-enabled collection, injecting the
 * hidden `url`, per-size `url`, and (optionally) `prefix` fields the plugin
 * relies on. Existing fields with the same names are merged so user-supplied
 * config (labels, hooks, etc.) is preserved.
 *
 * @param collection - the collection whose fields are being extended
 * @param prefix - optional storage key prefix; when defined (even ''), a
 *                 hidden `prefix` text field is persisted to the DB
 * @returns the new field array (the collection itself is not mutated)
 */
export const getFields = ({ collection, prefix }: Args): Field[] => {
  const baseURLField: Field = {
    name: 'url',
    label: 'URL',
    type: 'text',
    admin: {
      readOnly: true,
      hidden: true,
    },
  }

  const basePrefixField: Field = {
    name: 'prefix',
    type: 'text',
    admin: {
      readOnly: true,
      hidden: true,
    },
  }

  const fields = [...collection.fields]

  // Remove the named field from `list` (in place) and return it, if present.
  const extractFieldByName = (list: Field[], name: string): Field | undefined => {
    const index = list.findIndex(
      existingField => 'name' in existingField && existingField.name === name,
    )
    return index > -1 ? list.splice(index, 1)[0] : undefined
  }

  // Re-append a `url` field, merging any user-defined one so its config wins.
  const existingURLField = extractFieldByName(fields, 'url') as TextField | undefined
  fields.push({
    ...baseURLField,
    ...(existingURLField || {}),
  })

  // Mirror the injection for every configured image size's `url` field.
  if (typeof collection.upload === 'object' && collection.upload.imageSizes) {
    const existingSizesField = extractFieldByName(fields, 'sizes') as GroupField | undefined

    const sizesField: Field = {
      ...(existingSizesField || {}),
      name: 'sizes',
      type: 'group',
      admin: {
        hidden: true,
      },
      fields: collection.upload.imageSizes.map(size => {
        const existingSizeField = existingSizesField?.fields.find(
          existingField => 'name' in existingField && existingField.name === size.name,
        ) as GroupField | undefined
        const existingSizeURLField = existingSizeField?.fields.find(
          existingField => 'name' in existingField && existingField.name === 'url',
        ) as GroupField | undefined

        return {
          ...existingSizeField,
          name: size.name,
          type: 'group',
          fields: [
            {
              ...(existingSizeURLField || {}),
              ...baseURLField,
            },
          ],
        }
      }),
    }
    fields.push(sizesField)
  }

  // If prefix is enabled, save it to db
  if (typeof prefix !== 'undefined') {
    const existingPrefixField = extractFieldByName(fields, 'prefix') as TextField | undefined
    fields.push({
      ...basePrefixField,
      ...(existingPrefixField || {}),
      // path.posix.join('') returns '.', which would persist a bogus '.'
      // prefix for collections configured with an empty-string prefix —
      // only normalize non-empty prefixes.
      defaultValue: prefix ? path.posix.join(prefix) : prefix,
    })
  }

  return fields
}

39
src/admin/index.ts Normal file
View File

@@ -0,0 +1,39 @@
import type { Config } from 'payload/config'
import type { PluginOptions } from '../types'
import { getFields } from './fields/getFields'
// This is the admin plugin cloud-storage stubfile.
// It only extends the config that are required by the admin UI.
export const cloudStorage =
(pluginOptions: PluginOptions) =>
(incomingConfig: Config): Config => {
const { collections: allCollectionOptions, enabled } = pluginOptions
const config = { ...incomingConfig }
// Return early if disabled. Only webpack config mods are applied.
if (enabled === false) {
return config
}
return {
...config,
collections: (config.collections || []).map(existingCollection => {
const options = allCollectionOptions[existingCollection.slug]
if (options?.adapter) {
const fields = getFields({
collection: existingCollection,
prefix: options.prefix,
})
return {
...existingCollection,
fields,
}
}
return existingCollection
}),
}
}

View File

@@ -129,7 +129,7 @@ export const getFields = ({
}
// If prefix is enabled, save it to db
if (prefix) {
if (typeof prefix !== 'undefined') {
let existingPrefixFieldIndex = -1
const existingPrefixField = fields.find((existingField, i) => {

View File

@@ -1,3 +1,4 @@
import path from 'path'
import type { Config } from 'payload/config'
import type { Configuration as WebpackConfig } from 'webpack'
import type { GeneratedAdapter, PluginOptions } from './types'
@@ -21,6 +22,7 @@ export const extendWebpackConfig =
...(existingWebpackConfig.resolve || {}),
alias: {
...(existingWebpackConfig.resolve?.alias ? existingWebpackConfig.resolve.alias : {}),
'@payloadcms/plugin-cloud-storage$': path.resolve(__dirname, './admin/index.js'),
},
},
}