build: move larger scripts into tools dir in workspace (#10653)

Having the `scripts` dir reuse all packages from the top level was
getting quite unwieldy. Created a new `tools` directory that is part of
the workspace; its packages are exported under the `@tools` package
namespace.
This commit is contained in:
Elliot DeNolf
2025-01-20 11:34:51 -05:00
committed by GitHub
parent ef4b8d9b00
commit f18ca9cc2b
32 changed files with 448 additions and 262 deletions

View File

@@ -1,84 +0,0 @@
import chalk from 'chalk'
import { exec as execOrig, execSync } from 'child_process'
import fs from 'fs/promises'
import { fileURLToPath } from 'node:url'
import path from 'path'
// ESM equivalents of __filename/__dirname
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
// Entry point: exit non-zero on any unhandled error
main().catch((error) => {
  console.error(error)
  process.exit(1)
})
/**
 * Installs local .tgz package builds into a template directory, pins them via
 * pnpm overrides, writes a .env, and runs the template's build.
 *
 * Usage: <script> <template-name> [database-connection]
 */
async function main() {
  const templateDir = path.resolve(dirname, '../templates')
  const templateName = process.argv[2]
  // Fail fast with a usable message instead of joining 'undefined' into the path
  if (!templateName) {
    throw new Error('Template name is required as the first argument')
  }
  const templatePath = path.join(templateDir, templateName)
  const databaseConnection = process.argv[3] || 'mongodb://127.0.0.1/your-database-name'
  console.log({
    templatePath,
    databaseConnection,
  })
  const execOpts = {
    cwd: templatePath,
    stdio: 'inherit' as const,
  }
  // Log which tarballs are present (informational only)
  const allFiles = await fs.readdir(templatePath, { withFileTypes: true })
  const allTgzs = allFiles
    .filter((file) => file.isFile())
    .map((file) => file.name)
    .filter((file) => file.endsWith('.tgz'))
  console.log({
    allTgzs,
  })
  execSync('pnpm add ./*.tgz --ignore-workspace', execOpts)
  execSync('pnpm install --ignore-workspace', execOpts)
  const packageJsonPath = path.join(templatePath, 'package.json')
  const packageJson = await fs.readFile(packageJsonPath, 'utf-8')
  const packageJsonObj = JSON.parse(packageJson) as {
    dependencies?: Record<string, string>
    pnpm?: { overrides: Record<string, string> }
  }
  // Get key/value pairs for any package that starts with '@payloadcms'.
  // Guard against a package.json with no dependencies — previously this
  // expression evaluated to undefined and crashed on .forEach below.
  const payloadValues = packageJsonObj.dependencies
    ? Object.entries(packageJsonObj.dependencies).filter(
        ([key]) => key.startsWith('@payloadcms') || key === 'payload',
      )
    : []
  // Add each package to the overrides
  const overrides = packageJsonObj.pnpm?.overrides || {}
  payloadValues.forEach(([key, value]) => {
    overrides[key] = value
  })
  // Write package.json back to disk
  packageJsonObj.pnpm = { overrides }
  await fs.writeFile(packageJsonPath, JSON.stringify(packageJsonObj, null, 2))
  execSync('pnpm install --ignore-workspace --no-frozen-lockfile', execOpts)
  await fs.writeFile(
    path.resolve(templatePath, '.env'),
    // Populate POSTGRES_URL just in case it's needed
    `PAYLOAD_SECRET=secret
DATABASE_URI=${databaseConnection}
POSTGRES_URL=${databaseConnection}
BLOB_READ_WRITE_TOKEN=vercel_blob_rw_TEST_asdf`,
  )
  execSync('pnpm run build', execOpts)
  header(`\n🎉 Done!`)
}
function header(msg: string, opts?: { enable?: boolean }) {
  // Bold green banner; the options bag is currently unused but kept for signature parity.
  const banner = `${msg}\n`
  console.log(chalk.bold.green(banner))
}

View File

@@ -1,381 +0,0 @@
/**
* This script generates variations of the templates into the `templates` directory.
*
* How to use:
*
* pnpm run script:gen-templates
*
* NOTE: You will likely have to commit by using the `--no-verify` flag to avoid the repo linting
* There is no way currently to have lint-staged ignore the templates directory.
*/
import chalk from 'chalk'
import { execSync } from 'child_process'
import { configurePayloadConfig } from 'create-payload-app/lib/configure-payload-config.js'
import { copyRecursiveSync } from 'create-payload-app/utils/copy-recursive-sync.js'
import minimist from 'minimist'
import * as fs from 'node:fs/promises'
import { fileURLToPath } from 'node:url'
import path from 'path'
import type { DbType, StorageAdapterType } from '../packages/create-payload-app/src/types.js'
// ESM equivalents of __filename/__dirname
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
/** Describes one generated template variation and how to configure it. */
type TemplateVariations = {
  /** package.json name */
  name: string
  /** Base template to copy from */
  base?: string
  /** Directory in templates dir */
  dirname: string
  /** Database adapter key passed to configurePayloadConfig */
  db: DbType
  /** Storage adapter key passed to configurePayloadConfig */
  storage: StorageAdapterType
  /** Passed through to configurePayloadConfig (sharp image processing) */
  sharp: boolean
  /** Pre-filled "Deploy with Vercel" URL embedded in the README */
  vercelDeployButtonLink?: string
  /** Overrides for generated env var names (e.g. DATABASE_URI -> POSTGRES_URL) */
  envNames?: {
    dbUri: string
  }
  /**
   * @default false
   */
  skipReadme?: boolean
  /** Skip copying payload.config.ts from the base template */
  skipConfig?: boolean
  /**
   * @default false
   */
  skipDockerCompose?: boolean
  /** When false, payload.config.ts placeholder comments are left untouched */
  configureConfig?: boolean
  /** When true, a pnpm-lock.yaml is generated and kept for this template */
  generateLockfile?: boolean
}
// Entry point: exit non-zero on any unhandled error
main().catch((error) => {
  console.error(error)
  process.exit(1)
})
/**
 * Generates each template variation from a base template: copies files,
 * configures payload.config.ts and .env.example, writes a README, installs
 * dependencies, and (for Postgres variants) creates an initial migration.
 * Pass --template <dirname> to regenerate a single variation.
 */
async function main() {
  const args = minimist(process.argv.slice(2))
  const template = args['template'] // template directory name
  const templatesDir = path.resolve(dirname, '../templates')
  const templateRepoUrlBase = `https://github.com/payloadcms/payload/tree/main/templates`
  let variations: TemplateVariations[] = [
    {
      name: 'payload-vercel-postgres-template',
      dirname: 'with-vercel-postgres',
      db: 'vercel-postgres',
      storage: 'vercelBlobStorage',
      sharp: false,
      vercelDeployButtonLink:
        `https://vercel.com/new/clone?repository-url=` +
        encodeURI(
          `${templateRepoUrlBase}/with-vercel-postgres` +
            '&project-name=payload-project' +
            '&env=PAYLOAD_SECRET' +
            '&build-command=pnpm run ci' +
            '&stores=[{"type":"postgres"},{"type":"blob"}]', // Postgres and Vercel Blob Storage
        ),
      envNames: {
        // This will replace the process.env.DATABASE_URI to process.env.POSTGRES_URL
        dbUri: 'POSTGRES_URL',
      },
    },
    {
      name: 'payload-vercel-website-template',
      base: 'website', // This is the base template to copy from
      dirname: 'with-vercel-website',
      db: 'vercel-postgres',
      storage: 'vercelBlobStorage',
      sharp: true,
      vercelDeployButtonLink:
        `https://vercel.com/new/clone?repository-url=` +
        encodeURI(
          `${templateRepoUrlBase}/with-vercel-website` +
            '&project-name=payload-project' +
            '&env=PAYLOAD_SECRET%2CCRON_SECRET' +
            '&build-command=pnpm run ci' +
            '&stores=[{"type":"postgres"},{"type":"blob"}]', // Postgres and Vercel Blob Storage
        ),
      envNames: {
        // This will replace the process.env.DATABASE_URI to process.env.POSTGRES_URL
        dbUri: 'POSTGRES_URL',
      },
      skipReadme: true,
      skipDockerCompose: true,
    },
    {
      name: 'payload-postgres-template',
      dirname: 'with-postgres',
      db: 'postgres',
      storage: 'localDisk',
      sharp: true,
    },
    {
      name: 'payload-vercel-mongodb-template',
      dirname: 'with-vercel-mongodb',
      db: 'mongodb',
      storage: 'vercelBlobStorage',
      sharp: false,
      vercelDeployButtonLink:
        `https://vercel.com/new/clone?repository-url=` +
        encodeURI(
          `${templateRepoUrlBase}/with-vercel-mongodb` +
            '&project-name=payload-project' +
            '&env=PAYLOAD_SECRET' +
            '&build-command=pnpm run ci' +
            '&stores=[{"type":"blob"}]' + // Vercel Blob Storage
            '&integration-ids=oac_jnzmjqM10gllKmSrG0SGrHOH', // MongoDB Atlas
        ),
      envNames: {
        dbUri: 'MONGODB_URI',
      },
    },
    {
      name: 'blank',
      dirname: 'blank',
      db: 'mongodb',
      generateLockfile: true,
      storage: 'localDisk',
      sharp: true,
      skipConfig: true, // Do not copy the payload.config.ts file from the base template
      // The blank template is used as a base for create-payload-app functionality,
      // so we do not configure the payload.config.ts file, which leaves the placeholder comments.
      configureConfig: false,
    },
  ]
  // If template is set, only generate that template
  if (template) {
    const variation = variations.find((v) => v.dirname === template)
    if (!variation) {
      throw new Error(`Variation not found: ${template}`)
    }
    variations = [variation]
  }
  for (const {
    name,
    base,
    dirname,
    db,
    generateLockfile,
    storage,
    vercelDeployButtonLink,
    envNames,
    sharp,
    configureConfig,
    skipReadme = false,
    skipConfig = false,
    skipDockerCompose = false,
  } of variations) {
    header(`Generating ${name}...`)
    const destDir = path.join(templatesDir, dirname)
    // Copy from the base template, excluding build artifacts, env files, and lockfiles
    copyRecursiveSync(path.join(templatesDir, base || '_template'), destDir, [
      'node_modules',
      '\\*\\.tgz',
      '.next',
      '.env$',
      'pnpm-lock.yaml',
      ...(skipReadme ? ['README.md'] : []),
      ...(skipDockerCompose ? ['docker-compose.yml'] : []),
      ...(skipConfig ? ['payload.config.ts'] : []),
    ])
    log(`Copied to ${destDir}`)
    if (configureConfig !== false) {
      log('Configuring payload.config.ts')
      const configureArgs = {
        dbType: db,
        packageJsonName: name,
        projectDirOrConfigPath: { projectDir: destDir },
        storageAdapter: storage,
        sharp,
        envNames,
      }
      await configurePayloadConfig(configureArgs)
      log('Configuring .env.example')
      // Replace DATABASE_URI with the correct env name if set
      await writeEnvExample({
        destDir,
        envNames,
        dbType: db,
      })
    }
    if (!skipReadme) {
      await generateReadme({
        destDir,
        data: {
          name,
          description: name, // TODO: Add descriptions
          attributes: { db, storage },
          ...(vercelDeployButtonLink && { vercelDeployButtonLink }),
        },
      })
    }
    if (generateLockfile) {
      log('Generating pnpm-lock.yaml')
      execSyncSafe(`pnpm install --ignore-workspace`, { cwd: destDir })
    } else {
      log('Installing dependencies without generating lockfile')
      // Same install as above, but the resulting lockfile is deleted afterwards
      execSyncSafe(`pnpm install --ignore-workspace`, { cwd: destDir })
      await fs.rm(`${destDir}/pnpm-lock.yaml`, { force: true })
    }
    // Copy in initial migration if db is postgres. This contains user and media.
    if (db === 'postgres' || db === 'vercel-postgres') {
      // Add "ci" script to package.json
      const packageJsonPath = path.join(destDir, 'package.json')
      const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'))
      packageJson.scripts = packageJson.scripts || {}
      packageJson.scripts.ci = 'payload migrate && pnpm build'
      await fs.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2))
      const migrationDestDir = path.join(destDir, 'src/migrations')
      // Delete and recreate migrations directory
      await fs.rm(migrationDestDir, { recursive: true, force: true })
      await fs.mkdir(migrationDestDir, { recursive: true })
      log(`Generating initial migrations in ${migrationDestDir}`)
      execSyncSafe(`pnpm run payload migrate:create initial`, {
        cwd: destDir,
        env: {
          ...process.env,
          PAYLOAD_SECRET: 'asecretsolongnotevensantacouldguessit',
          BLOB_READ_WRITE_TOKEN: 'vercel_blob_rw_TEST_asdf',
          DATABASE_URI: process.env.POSTGRES_URL || 'postgres://localhost:5432/your-database-name',
        },
      })
    }
    // TODO: Email?
    // TODO: Sharp?
    log(`Done configuring payload config for ${destDir}/src/payload.config.ts`)
  }
  // TODO: Run prettier manually on the generated files, husky blows up
  log('Running prettier on generated files...')
  execSyncSafe(`pnpm prettier --write templates "*.{js,jsx,ts,tsx}"`)
  log('Template generation complete!')
}
/** Writes a README.md into destDir: title, optional Vercel deploy badge, description, and an attributes list. */
async function generateReadme({
  destDir,
  data: { name, description, attributes, vercelDeployButtonLink },
}: {
  destDir: string
  data: {
    name: string
    description: string
    attributes: Pick<TemplateVariations, 'db' | 'storage'>
    vercelDeployButtonLink?: string
  }
}) {
  const titleParts = [`# ${name}\n`]
  if (vercelDeployButtonLink) {
    titleParts.push(
      `\n[![Deploy with Vercel](https://vercel.com/button)](${vercelDeployButtonLink})`,
    )
  }
  const header = titleParts.join('')
  // Assemble the document line-by-line; trailing '' yields a final newline.
  const readmeContent = [
    header,
    description,
    '## Attributes',
    `- **Database**: ${attributes.db}`,
    `- **Storage Adapter**: ${attributes.storage}`,
    '',
  ].join('\n')
  await fs.writeFile(path.join(destDir, 'README.md'), readmeContent)
  log('Generated README.md')
}
/**
 * Rewrites .env.example in place: strips stale Postgres comment lines for
 * vercel-postgres templates, swaps in a db-appropriate connection string,
 * renames the env var when the template overrides it, and drops blank lines.
 */
async function writeEnvExample({
  destDir,
  envNames,
  dbType,
}: {
  destDir: string
  envNames?: TemplateVariations['envNames']
  dbType: DbType
}) {
  const envExamplePath = path.join(destDir, '.env.example')
  const sourceLines = (await fs.readFile(envExamplePath, 'utf8')).split('\n')

  // Remove the unwanted PostgreSQL connection comment for "with-vercel-website"
  const keepLine = (line: string): boolean => {
    const isStalePgComment =
      dbType === 'vercel-postgres' &&
      (line.startsWith('# Or use a PG connection string') ||
        line.startsWith('#DATABASE_URI=postgresql://'))
    return !isStalePgComment
  }

  const rewriteLine = (line: string): string => {
    if (!line.startsWith('DATABASE_URI')) {
      return line
    }
    let result = line
    if (dbType === 'mongodb') {
      result = 'MONGODB_URI=mongodb://127.0.0.1/your-database-name'
    }
    // Use db-appropriate connection string
    if (dbType.includes('postgres')) {
      result = 'DATABASE_URI=postgresql://127.0.0.1:5432/your-database-name'
    }
    // Replace DATABASE_URI with the correct env name if set
    if (envNames?.dbUri) {
      result = result.replace('DATABASE_URI', envNames.dbUri)
    }
    return result
  }

  const fileContents = sourceLines
    .filter(keepLine)
    .map(rewriteLine)
    .filter((line) => line.trim() !== '')
    .join('\n')

  console.log(`Writing to ${envExamplePath}`)
  await fs.writeFile(envExamplePath, fileContents)
}
function header(msg: string) {
  // Emphasized section banner with surrounding blank lines.
  console.log(chalk.bold.green('\n' + msg + '\n'))
}
function log(msg: string) {
  // Dimmed progress output.
  console.log(chalk.dim(msg))
}
/**
 * Runs a shell command with inherited stdio; on failure, surfaces whatever
 * output the child produced (stderr preferred, then stdout) before rethrowing.
 */
function execSyncSafe(command: string, options?: Parameters<typeof execSync>[1]) {
  try {
    console.log(`Executing: ${command}`)
    execSync(command, { stdio: 'inherit', ...options })
  } catch (error) {
    if (!(error instanceof Error)) {
      console.error('An unexpected error occurred:', error)
      throw error
    }
    const failure = error as any
    const stderr: string | undefined = failure.stderr?.toString()
    const stdout: string | undefined = failure.stdout?.toString()
    if (stderr && stderr.trim()) {
      console.error('Standard Error:', stderr)
    } else if (stdout && stdout.trim()) {
      console.error('Standard Output (likely contains error details):', stdout)
    } else {
      console.error('An unknown error occurred with no output.')
    }
    throw error
  }
}

View File

@@ -1,53 +0,0 @@
import fse from 'fs-extra'
import globby from 'globby'
import path, { dirname } from 'path'
import { fileURLToPath } from 'url'
// ESM equivalents of __filename/__dirname; project root is two levels up
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
const projectRoot = path.resolve(__dirname, '../../')
/** Metadata describing a single publishable package in the monorepo. */
export type PackageDetails = {
  /** Name in package.json / npm registry */
  name: string
  /** Full path to package relative to project root */
  packagePath: `packages/${string}`
  /** Short name is the directory name */
  shortName: string
  /** Version in package.json */
  version: string
}
/**
 * Accepts an optional package whitelist (directory names inside the packages
 * dir) and returns details for each public package. Passing `null`/`undefined`
 * (or omitting the argument) returns every public package — some callers pass
 * `null` explicitly to mean "all".
 */
export const getPackageDetails = async (
  packages?: null | string[],
): Promise<PackageDetails[]> => {
  // Fetch all package.json files, filter out packages not in the whitelist
  const packageJsons = await globby('packages/*/package.json', {
    cwd: projectRoot,
    absolute: true,
  })
  const packageDetails = await Promise.all(
    packageJsons.map(async (packageJsonPath) => {
      const packageJson = await fse.readJson(packageJsonPath)
      // Private packages are never published — skip them
      const isPublic = packageJson.private !== true
      if (!isPublic) return null
      // Directory name of the package, e.g. 'db-mongodb'
      const packageDir = path.basename(path.dirname(packageJsonPath))
      const isInWhitelist = packages ? packages.includes(packageDir) : true
      if (!isInWhitelist) return null
      return {
        name: packageJson.name as string,
        packagePath: path.relative(projectRoot, dirname(packageJsonPath)),
        // Fix: previously `path.dirname(packageJsonPath)` (the full directory
        // path), which contradicted the documented "directory name" contract
        shortName: packageDir,
        version: packageJson.version,
      } as PackageDetails
    }),
  )
  return packageDetails.filter((p): p is Exclude<typeof p, null> => p !== null)
}

View File

@@ -1,34 +0,0 @@
import chalk from 'chalk'
import { pathToFileURL } from 'node:url'
import pLimit from 'p-limit'
import { getPackageDetails } from './getPackageDetails.js'
import { packagePublishList } from './publishList.js'
const npmRequestLimit = pLimit(40)
/**
 * Prints a table of the `latest`, `beta`, and `canary` dist-tags currently
 * published on the npm registry for every package in the publish list.
 */
export const getPackageRegistryVersions = async (): Promise<void> => {
  const packageDetails = await getPackageDetails(packagePublishList)
  const results = await Promise.all(
    packageDetails.map(async (pkg) =>
      npmRequestLimit(async () => {
        // Get published version from npm
        // NOTE(review): response is untyped/unvalidated; a registry error
        // payload surfaces here as missing dist-tags ('N/A' columns)
        const json = await fetch(`https://registry.npmjs.org/${pkg.name}`).then((res) => res.json())
        const { latest = 'N/A', beta = 'N/A', canary = 'N/A' } = json['dist-tags'] ?? {}
        const msg = `${pkg.name.padEnd(36)}${latest?.padEnd(16)}${beta?.padEnd(16)}${canary}`
        return msg
      }),
    ),
  )
  const header = chalk.bold.green(
    'Package Versions'.padEnd(36) + 'Latest'.padEnd(16) + 'Beta'.padEnd(16) + 'Canary',
  )
  console.log(header)
  console.log()
  console.log(results.sort().join('\n'))
}
// Run only when executed directly as a script. The previous guard compared
// import.meta.url against its own normalized href, which is effectively always
// true, so the registry fetch ran even when this module was merely imported.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  await getPackageRegistryVersions()
}

View File

@@ -1,231 +0,0 @@
import type { ReleaseType } from 'semver'
import { execSync } from 'child_process'
import execa from 'execa'
import fse from 'fs-extra'
import { fileURLToPath } from 'node:url'
import pLimit from 'p-limit'
import path from 'path'
import semver from 'semver'
import { getPackageDetails } from './getPackageDetails.js'
import { packagePublishList } from './publishList.js'
// ESM equivalents of __filename/__dirname; paths below are repo-root relative
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
const projectRoot = path.resolve(dirname, '../../')
const rootPackageJsonPath = path.resolve(projectRoot, 'package.json')
// Throttle concurrent `pnpm publish` invocations
const npmPublishLimit = pLimit(5)
const cwd = path.resolve(dirname, '../../')
const execaOpts: execa.Options = { stdio: 'inherit' }
// NOTE(review): duplicates the PackageDetails type exported from
// getPackageDetails.js — consider importing it instead to avoid drift
type PackageDetails = {
  /** Name in package.json / npm registry */
  name: string
  /** Full path to package relative to project root */
  packagePath: `packages/${string}`
  /** Short name is the directory name */
  shortName: string
  /** Version in package.json */
  version: string
}
/** Semver release type, extended with 'canary' for hash-suffixed prereleases */
type PackageReleaseType = 'canary' | ReleaseType
/** Outcome of publishing one package */
type PublishResult = {
  name: string
  success: boolean
  details?: string
}
type PublishOpts = {
  dryRun?: boolean
  tag?: 'beta' | 'canary' | 'latest'
}
/** Handle over the monorepo's publishable packages and release operations */
type Workspace = {
  version: () => Promise<string>
  tag: string
  packages: PackageDetails[]
  showVersions: () => Promise<void>
  bumpVersion: (type: PackageReleaseType) => Promise<void>
  build: () => Promise<void>
  publish: (opts: PublishOpts) => Promise<void>
  publishSync: (opts: PublishOpts) => Promise<void>
}
/**
 * Builds a Workspace handle exposing version/bump/build/publish operations
 * over all publishable monorepo packages.
 */
export const getWorkspace = async () => {
  // Install dependencies and build every package; throws on non-zero exit
  const build = async () => {
    await execa('pnpm', ['install'], execaOpts)
    const buildResult = await execa('pnpm', ['build:all', '--output-logs=errors-only'], execaOpts)
    if (buildResult.exitCode !== 0) {
      console.error('Build failed')
      console.log(buildResult.stderr)
      throw new Error('Build failed')
    }
  }
  // Publish one package at a time
  const publishSync: Workspace['publishSync'] = async ({ dryRun, tag = 'canary' }) => {
    const packageDetails = await getPackageDetails(packagePublishList)
    const results: PublishResult[] = []
    for (const pkg of packageDetails) {
      const res = await publishSinglePackage(pkg, { dryRun, tag })
      results.push(res)
    }
    console.log(`\n\nResults:\n`)
    console.log(
      results
        .map((result) => {
          if (!result.success) {
            console.error(result.details)
            return `${result.name}`
          }
          return `${result.name}`
        })
        .join('\n') + '\n',
    )
  }
  // Publish all packages concurrently (throttled).
  // NOTE(review): this ignores the caller's PublishOpts and hard-codes
  // dryRun: true, so Workspace.publish never actually publishes — confirm
  // whether that is intentional
  const publish = async () => {
    const packageDetails = await getPackageDetails(packagePublishList)
    const results = await Promise.allSettled(
      packageDetails.map((pkg) => publishPackageThrottled(pkg, { dryRun: true })),
    )
    console.log(`\n\nResults:\n`)
    console.log(
      results
        .map((result) => {
          if (result.status === 'rejected') {
            console.error(result.reason)
            return `${String(result.reason)}`
          }
          const { name, success, details } = result.value
          let summary = ` ${success ? '✅' : '❌'} ${name}`
          if (details) {
            summary += `\n ${details}\n`
          }
          return summary
        })
        .join('\n') + '\n',
    )
  }
  // Print the current monorepo version and the publishable packages
  const showVersions = async () => {
    const { packages, version } = await getCurrentPackageState()
    console.log(`\n Version: ${version}\n`)
    console.log(` Changes (${packages.length} packages):\n`)
    console.log(`${packages.map((p) => ` - ${p.name.padEnd(32)} ${p.version}`).join('\n')}\n`)
  }
  // Write the given version into the root and every publishable package.json
  const setVersion = async (version: string) => {
    const rootPackageJson = await fse.readJSON(rootPackageJsonPath)
    rootPackageJson.version = version
    await fse.writeJSON(rootPackageJsonPath, rootPackageJson, { spaces: 2 })
    const packageJsons = await getPackageDetails(packagePublishList)
    await Promise.all(
      packageJsons.map(async (pkg) => {
        const packageJson = await fse.readJSON(`${pkg.packagePath}/package.json`)
        packageJson.version = version
        await fse.writeJSON(`${pkg.packagePath}/package.json`, packageJson, { spaces: 2 })
      }),
    )
  }
  // Compute the next version ('canary' = next minor + short git hash) and persist it
  const bumpVersion = async (bumpType: PackageReleaseType) => {
    const { version: monorepoVersion, packages: packageDetails } = await getCurrentPackageState()
    let nextReleaseVersion
    if (bumpType === 'canary') {
      const hash = execSync('git rev-parse --short HEAD', { encoding: 'utf8' }).trim().slice(0, 7)
      nextReleaseVersion = semver.inc(monorepoVersion, 'minor') + `-canary.${hash}`
    } else {
      nextReleaseVersion = semver.inc(monorepoVersion, bumpType)
    }
    console.log(`\n Version: ${monorepoVersion} => ${nextReleaseVersion}\n`)
    console.log(` Bump: ${bumpType}`)
    console.log(` Changes (${packageDetails.length} packages):\n`)
    console.log(
      `${packageDetails.map((p) => ` - ${p.name.padEnd(32)} ${p.version} => ${nextReleaseVersion}`).join('\n')}\n`,
    )
    await setVersion(nextReleaseVersion)
  }
  const workspace: Workspace = {
    version: async () => (await fse.readJSON(rootPackageJsonPath)).version,
    tag: 'latest',
    packages: await getPackageDetails(packagePublishList),
    showVersions,
    bumpVersion,
    build,
    publish,
    publishSync,
  }
  return workspace
}
async function getCurrentPackageState(): Promise<{
packages: PackageDetails[]
version: string
}> {
const packageDetails = await getPackageDetails(packagePublishList)
const rootPackageJson = await fse.readJSON(rootPackageJsonPath)
return { packages: packageDetails, version: rootPackageJson.version }
}
/** Publish with promise concurrency throttling; dry-run defaults to true for safety. */
async function publishPackageThrottled(pkg: PackageDetails, opts?: { dryRun?: boolean }) {
  const dryRun = opts?.dryRun ?? true
  return npmPublishLimit(() => publishSinglePackage(pkg, { dryRun }))
}
/**
 * Runs `pnpm publish` for one package and reports the outcome as a value
 * rather than throwing, so batch publishers can aggregate results.
 */
async function publishSinglePackage(pkg: PackageDetails, opts: PublishOpts) {
  console.log(`🚀 ${pkg.name} publishing...`)
  const { dryRun, tag = 'canary' } = opts
  try {
    const cmdArgs = ['publish', '-C', pkg.packagePath, '--no-git-checks', '--tag', tag]
    if (dryRun) {
      cmdArgs.push('--dry-run')
    }
    const { exitCode, stderr } = await execa('pnpm', cmdArgs, {
      cwd,
      // stdio: ['ignore', 'ignore', 'pipe'],
      stdio: 'inherit',
    })
    if (exitCode === 0) {
      console.log(`${pkg.name} published`)
      return { name: pkg.name, success: true }
    }
    console.log(`\n\n❌ ${pkg.name} ERROR: pnpm publish failed\n\n${stderr}`)
    return {
      name: pkg.name,
      success: false,
      details: `Exit Code: ${exitCode}, stderr: ${stderr}`,
    }
  } catch (err: unknown) {
    console.error(err)
    const details =
      err instanceof Error
        ? `Error publishing ${pkg.name}: ${err.message}`
        : `Unexpected error publishing ${pkg.name}: ${String(err)}`
    return { name: pkg.name, success: false, details }
  }
}

View File

@@ -1,56 +0,0 @@
/**
* Packages that should be published to NPM
*
* Note that this does not include all packages in the monorepo
*/
/**
 * Packages that should be published to NPM
 *
 * Note that this does not include all packages in the monorepo
 *
 * NOTE(review): 'storage-uploadthing' and 'live-preview-vue' appear both in
 * the active list above and in the "Unpublished" comments below — verify
 * which state is intended.
 */
export const packagePublishList = [
  'payload',
  'translations',
  'ui',
  'next',
  'graphql',
  'live-preview',
  'live-preview-react',
  'live-preview-vue',
  'richtext-slate',
  'richtext-lexical',
  'create-payload-app',
  // DB Adapters
  'drizzle',
  'db-mongodb',
  'db-postgres',
  'db-sqlite',
  'db-vercel-postgres',
  // Adapters
  'email-nodemailer',
  'email-resend',
  // Storage
  'storage-s3',
  'storage-azure',
  'storage-gcs',
  'storage-vercel-blob',
  'storage-uploadthing',
  // Plugins
  'payload-cloud',
  'plugin-cloud',
  'plugin-cloud-storage',
  'plugin-form-builder',
  // 'plugin-multi-tenant',
  'plugin-nested-docs',
  'plugin-redirects',
  'plugin-search',
  'plugin-sentry',
  'plugin-seo',
  'plugin-stripe',
  // Unpublished
  // 'storage-uploadthing',
  // 'eslint-config',
  // 'eslint-plugin',
  // 'live-preview-vue',
]

View File

@@ -1,100 +0,0 @@
import type { ExecSyncOptions } from 'child_process'
import type execa from 'execa'
import chalk from 'chalk'
import { exec as execOrig, execSync } from 'child_process'
import fse from 'fs-extra'
import minimist from 'minimist'
import { fileURLToPath } from 'node:url'
import path from 'path'
import util from 'util'
import type { PackageDetails } from './lib/getPackageDetails.js'
import { getPackageDetails } from './lib/getPackageDetails.js'
const execOpts: ExecSyncOptions = { stdio: 'inherit' }
// NOTE(review): execaOpts is declared but never used in this script
const execaOpts: execa.Options = { stdio: 'inherit' }
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
// Promisified exec so packing can run concurrently
const exec = util.promisify(execOrig)
// Entry point: exit non-zero on any unhandled error
main().catch((error) => {
  console.error(error)
  process.exit(1)
})
/**
 * Packs monorepo packages into .tgz tarballs at --dest.
 * Flags: --all (pack every public package), --no-build (skip prebuild),
 * --dest <dir> (required output directory).
 */
async function main() {
  // Strip boolean flags before handing the rest to minimist
  const all = process.argv.includes('--all')
  process.argv = process.argv.filter((arg) => arg !== '--all')
  const noBuild = process.argv.includes('--no-build')
  process.argv = process.argv.filter((arg) => arg !== '--no-build')
  const args = minimist(process.argv.slice(2))
  const { dest } = args
  if (!dest) {
    throw new Error('--dest is required')
  }
  const resolvedDest = path.resolve(dest)
  // null whitelist means "all public packages" (see getPackageDetails)
  const packageWhitelist = all
    ? null
    : [
        'payload',
        'db-mongodb',
        'db-postgres',
        'db-sqlite',
        'db-vercel-postgres',
        'drizzle',
        'graphql',
        'live-preview-react',
        'next',
        'payload-cloud',
        'plugin-cloud',
        'plugin-form-builder',
        'plugin-nested-docs',
        'plugin-redirects',
        'plugin-search',
        'plugin-seo',
        'richtext-lexical',
        'translations',
        'ui',
      ]
  const packageDetails = await getPackageDetails(packageWhitelist)
  // Prebuild all packages
  header(`\n🔨 Prebuilding all packages...`)
  // NOTE(review): getPackageDetails already strips nulls, so this filter is a no-op
  const filtered = packageDetails.filter((p): p is Exclude<typeof p, null> => p !== null)
  if (!noBuild) {
    execSync('pnpm build:all --output-logs=errors-only', { stdio: 'inherit' })
  }
  header(`\nOutputting ${filtered.length} packages...
${chalk.white.bold(listPackages(filtered))}`)
  header(`\n📦 Packing all packages to ${dest}...`)
  await Promise.all(
    filtered.map(async (p) => {
      await exec(`pnpm pack -C ${p.packagePath} --pack-destination ${resolvedDest}`)
    }),
  )
  header(`\n🎉 Done!`)
}
function header(msg: string, opts?: { enable?: boolean }) {
  // Bold green banner; the options bag is unused but kept for signature parity.
  console.log(chalk.bold.green(msg + '\n'))
}
function listPackages(packages: PackageDetails[]) {
  // One bullet line per package name.
  const bullets: string[] = []
  for (const pkg of packages) {
    bullets.push(` - ${pkg.name}`)
  }
  return bullets.join('\n')
}

View File

@@ -1,58 +0,0 @@
import type { ExecSyncOptions } from 'child_process'
import type execa from 'execa'
import chalk from 'chalk'
import minimist from 'minimist'
import { fileURLToPath } from 'node:url'
import pLimit from 'p-limit'
import path from 'path'
import { getWorkspace } from './lib/getWorkspace.js'
// NOTE(review): npmPublishLimit, cwd, execOpts, execaOpts, and args are
// declared but unused in this script — likely copied from release.ts
const npmPublishLimit = pLimit(5)
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
const cwd = path.resolve(dirname, '..')
const execOpts: ExecSyncOptions = { stdio: 'inherit' }
const execaOpts: execa.Options = { stdio: 'inherit' }
const args = minimist(process.argv.slice(2))
// const {
//   bump = 'patch', // Semver release type
//   changelog = false, // Whether to update the changelog. WARNING: This gets throttled on too many commits
//   'dry-run': dryRun,
//   'git-tag': gitTag = true, // Whether to run git tag and commit operations
//   'git-commit': gitCommit = true, // Whether to run git commit operations
//   tag = 'latest',
// } = args
// NOTE(review): dryRun is declared but unused — main() passes dryRun: false explicitly
const dryRun = true
/**
 * Canary release: bump to <next-minor>-canary.<git-hash>, build all packages,
 * then publish each package sequentially under the 'canary' dist-tag.
 */
async function main() {
  const workspace = await getWorkspace()
  await workspace.bumpVersion('canary')
  await workspace.build()
  // NOTE(review): dryRun is hard-coded false here even though a `dryRun`
  // const is defined above — confirm this is intentional
  await workspace.publishSync({ dryRun: false, tag: 'canary' })
  header('🎉 Done!')
}
// Entry point: exit non-zero on any unhandled error
main().catch((error) => {
  console.error(error)
  process.exit(1)
})
function abort(message = 'Abort', exitCode = 1) {
  // Print a bold red banner and terminate the process with the given code.
  const banner = '\n' + message + '\n'
  console.error(chalk.bold.red(banner))
  process.exit(exitCode)
}
/**
 * Print a bold green banner. Previously `enable` had no default and
 * `if (!enable) return` suppressed ALL output unless callers passed
 * `{ enable: true }` — main() calls `header('🎉 Done!')` with no opts, so the
 * final message never printed. Default `enable` to true; pass
 * `{ enable: false }` to explicitly silence.
 */
function header(message: string, opts?: { enable?: boolean }) {
  const { enable = true } = opts ?? {}
  if (!enable) return
  console.log(chalk.bold.green(`${message}\n`))
}

View File

@@ -1,376 +0,0 @@
/**
* Usage: GITHUB_TOKEN=$GITHUB_TOKEN pnpm release --bump <minor|patch>
*
* Ensure your GITHUB_TOKEN is set in your environment variables
* and also has the ability to create releases in the repository.
*/
import type { ExecSyncOptions } from 'child_process'
import chalk from 'chalk'
import { loadChangelogConfig } from 'changelogen'
import { execSync } from 'child_process'
import execa from 'execa'
import fse from 'fs-extra'
import minimist from 'minimist'
import { fileURLToPath } from 'node:url'
import path from 'path'
import prompts from 'prompts'
import semver from 'semver'
import type { PackageDetails } from './lib/getPackageDetails.js'
import { getPackageDetails } from './lib/getPackageDetails.js'
import { packagePublishList } from './lib/publishList.js'
import { createDraftGitHubRelease } from './utils/createDraftGitHubRelease.js'
import { generateReleaseNotes } from './utils/generateReleaseNotes.js'
import { getRecommendedBump } from './utils/getRecommendedBump.js'
// ESM equivalents of __filename/__dirname; cwd is the repo root
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
const cwd = path.resolve(dirname, '..')
const execOpts: ExecSyncOptions = { stdio: 'inherit' }
const execaOpts: execa.Options = { stdio: 'inherit' }
// CLI flags controlling the release
const args = minimist(process.argv.slice(2))
const {
  bump, // Semver release type: major, minor, patch, premajor, preminor, prepatch, prerelease
  changelog = false, // Whether to update the changelog. WARNING: This gets throttled on too many commits
  'dry-run': dryRun,
  'git-tag': gitTag = true, // Whether to run git tag and commit operations
  'git-commit': gitCommit = true, // Whether to run git commit operations
  tag, // Tag to publish to: latest, beta, canary
} = args
// Prefix echoed (not executed) commands in dry-run mode
const logPrefix = dryRun ? chalk.bold.magenta('[dry-run] >') : ''
// Returns a sync command runner. In dry-run mode every command is echoed
// instead of executed; git commands are additionally suppressed when tagging
// is disabled.
const cmdRunner =
  (dryRun: boolean, gitTag: boolean) => (cmd: string, execOpts: ExecSyncOptions) => {
    const suppressed = dryRun || (cmd.startsWith('git') && !gitTag)
    if (suppressed) {
      console.log(logPrefix, cmd)
      return
    }
    execSync(cmd, execOpts)
  }
// Returns an async command runner. In dry-run mode the command is echoed and
// a synthetic zero exit code is reported without spawning anything.
const cmdRunnerAsync =
  (dryRun: boolean) => async (cmd: string, args: string[], opts?: execa.Options) => {
    if (dryRun) {
      console.log(logPrefix, cmd, args.join(' '))
      return { exitCode: 0 }
    }
    return await execa(cmd, args, opts ?? { stdio: 'inherit' })
  }
async function main() {
if (!process.env.GITHUB_TOKEN) {
throw new Error('GITHUB_TOKEN env var is required')
}
if (dryRun) {
console.log(chalk.bold.yellow(chalk.bold.magenta('\n 👀 Dry run mode enabled')))
}
console.log({ args })
const fromVersion = execSync('git describe --match "v*" --tags --abbrev=0').toString().trim()
const config = await loadChangelogConfig(process.cwd(), {
repo: 'payloadcms/payload',
})
if (!semver.RELEASE_TYPES.includes(bump)) {
abort(`Invalid bump type: ${bump}.\n\nMust be one of: ${semver.RELEASE_TYPES.join(', ')}`)
}
const recommendedBump = (await getRecommendedBump(fromVersion, 'HEAD', config)) || 'patch'
if (bump !== recommendedBump) {
console.log(
chalk.bold.yellow(
`Recommended bump type is '${recommendedBump}' based on commits since last release`,
),
)
const confirmBump = await confirm(`Do you want to continue with bump: '${bump}'?`)
if (!confirmBump) {
abort()
}
}
const runCmd = cmdRunner(dryRun, gitTag)
const runCmdAsync = cmdRunnerAsync(dryRun)
if (bump.startsWith('pre') && tag === 'latest') {
abort(`Prerelease bumps must have tag: beta or canary`)
}
const monorepoVersion = fse.readJSONSync('package.json')?.version
if (!monorepoVersion) {
throw new Error('Could not find version in package.json')
}
const nextReleaseVersion = semver.inc(monorepoVersion, bump, undefined, tag)
if (!nextReleaseVersion) {
abort(`Invalid nextReleaseVersion: ${nextReleaseVersion}`)
return // For TS type checking
}
// Preview/Update changelog
header(`${logPrefix}📝 Updating changelog...`)
const {
changelog: changelogContent,
releaseUrl: prefilledReleaseUrl,
releaseNotes,
} = await generateReleaseNotes({
bump,
dryRun,
toVersion: 'HEAD',
fromVersion,
openReleaseUrl: true,
})
console.log(chalk.green('\nFull Release Notes:\n\n'))
console.log(chalk.gray(releaseNotes) + '\n\n')
console.log(`\n\nRelease URL: ${chalk.dim(prefilledReleaseUrl)}`)
let packageDetails = await getPackageDetails(packagePublishList)
console.log(chalk.bold(`\n Version: ${monorepoVersion} => ${chalk.green(nextReleaseVersion)}\n`))
console.log(chalk.bold.yellow(` Bump: ${bump}`))
console.log(chalk.bold.yellow(` Tag: ${tag}\n`))
console.log(chalk.bold.green(` Changes (${packageDetails.length} packages):\n`))
console.log(
`${packageDetails.map((p) => ` - ${p.name.padEnd(32)} ${p.version} => ${chalk.green(nextReleaseVersion)}`).join('\n')}\n`,
)
const confirmPublish = await confirm('Are you sure you want to create these versions?')
if (!confirmPublish) {
abort()
}
// Prebuild all packages
header(`\n🔨 Prebuilding all packages...`)
await execa('pnpm', ['install'], execaOpts)
const buildResult = await execa('pnpm', ['build:all', '--output-logs=errors-only'], execaOpts)
if (buildResult.exitCode !== 0) {
console.error(chalk.bold.red('Build failed'))
console.log(buildResult.stderr)
abort('Build failed')
}
// Increment all package versions
header(`${logPrefix}📦 Updating package.json versions...`)
await Promise.all(
packageDetails.map(async (pkg) => {
const packageJson = await fse.readJSON(`${pkg.packagePath}/package.json`)
packageJson.version = nextReleaseVersion
if (!dryRun) {
await fse.writeJSON(`${pkg.packagePath}/package.json`, packageJson, { spaces: 2 })
}
}),
)
// Set version in root package.json
header(`${logPrefix}📦 Updating root package.json...`)
const rootPackageJsonPath = path.resolve(dirname, '../package.json')
const rootPackageJson = await fse.readJSON(rootPackageJsonPath)
rootPackageJson.version = nextReleaseVersion
if (!dryRun) {
await fse.writeJSON(rootPackageJsonPath, rootPackageJson, { spaces: 2 })
}
// Commit
header(`🧑‍💻 Committing changes...`)
// Commit all staged changes
runCmd(`git add packages/**/package.json package.json`, execOpts)
// Wait 500ms to avoid .git/index.lock errors
await new Promise((resolve) => setTimeout(resolve, 500))
if (gitCommit) {
runCmd(`git commit -m "chore(release): v${nextReleaseVersion} [skip ci]"`, execOpts)
}
// Tag
header(`🏷️ Tagging release v${nextReleaseVersion}`, { enable: gitTag })
runCmd(`git tag -a v${nextReleaseVersion} -m "v${nextReleaseVersion}"`, execOpts)
// Publish only payload to get 5 min auth token
packageDetails = packageDetails.filter((p) => p.name !== 'payload')
runCmd(`pnpm publish -C packages/payload --no-git-checks --json --tag ${tag}`, execOpts)
const results: PublishResult[] = []
for (const pkg of packageDetails) {
const res = await publishSinglePackage(pkg, { dryRun })
results.push(res)
}
console.log(chalk.bold.green(`\n\nResults:\n`))
console.log(
results
.map((result) => {
if (!result.success) {
console.error(result.details)
}
return ` ${result.success ? '✅' : '❌'} ${result.name}`
})
.join('\n') + '\n',
)
header(`🚀 Publishing complete!`)
const pushTags = await confirm('Push commit and tags to remote?')
if (pushTags) {
runCmd(`git push --follow-tags`, execOpts)
console.log(chalk.bold.green('Commit and tags pushed to remote'))
}
const createDraftRelease = await confirm('Create draft release on GitHub?')
if (createDraftRelease) {
try {
const { releaseUrl: draftReleaseUrl } = await createDraftGitHubRelease({
branch: 'main',
tag: `v${nextReleaseVersion}`,
releaseNotes,
})
console.log(chalk.bold.green(`Draft release created on GitHub: ${draftReleaseUrl}`))
} catch (error) {
console.log(chalk.bold.red('\nFull Release Notes:\n\n'))
console.log(chalk.gray(releaseNotes) + '\n\n')
console.log(`\n\nRelease URL: ${chalk.dim(prefilledReleaseUrl)}`)
console.log(chalk.bold.red(`Error creating draft release on GitHub: ${error.message}`))
console.log(
chalk.bold.red(
`Use the above link to create the release manually and optionally add the release notes.`,
),
)
}
}
header('🎉 Done!')
}
// Entry point: surface any unhandled error and exit non-zero.
main().catch((err) => {
  console.error(err)
  process.exit(1)
})
/**
 * Publish one package to npm via `pnpm publish`, retrying once on failure.
 *
 * The first attempt pipes stderr so failures can be summarized; the retry
 * inherits stdio so the full pnpm output is visible in the console. Returns
 * a PublishResult instead of throwing so the caller can summarize every
 * package at the end of the run.
 *
 * @param pkg  Package to publish (name + packagePath)
 * @param opts dryRun — pass `--dry-run` to pnpm, performing no actual publish
 */
async function publishSinglePackage(pkg: PackageDetails, opts?: { dryRun?: boolean }) {
  const { dryRun = false } = opts ?? {}
  console.log(chalk.bold(`🚀 ${pkg.name} publishing...`))
  try {
    const cmdArgs = ['publish', '-C', pkg.packagePath, '--no-git-checks', '--json', '--tag', tag]
    if (dryRun) {
      cmdArgs.push('--dry-run')
    }
    const { exitCode, stderr } = await execa('pnpm', cmdArgs, {
      cwd,
      stdio: ['ignore', 'ignore', 'pipe'],
    })
    if (exitCode !== 0) {
      console.log(chalk.bold.red(`\n\n❌ ${pkg.name} ERROR: pnpm publish failed\n\n${stderr}`))
      // Retry publish once with inherited stdio so the full output is logged
      console.log(chalk.bold.yellow(`\nRetrying publish for ${pkg.name}...`))
      const { exitCode: retryExitCode, stderr: retryStdError } = await execa('pnpm', cmdArgs, {
        cwd,
        stdio: 'inherit', // log full output
      })
      if (retryExitCode !== 0) {
        console.error(
          chalk.bold.red(
            `\n\n❌ ${pkg.name} ERROR: pnpm publish failed on retry\n\n${retryStdError}`,
          ),
        )
        return {
          name: pkg.name,
          success: false,
          details: `Exit Code: ${retryExitCode}, stderr: ${retryStdError}`,
        }
      }
      // Bug fix: a successful retry was previously still reported as a
      // failure (the failure return sat outside the retryExitCode check).
      console.log(`${logPrefix} ${chalk.green(`${pkg.name} published`)}`)
      return { name: pkg.name, success: true }
    }
    console.log(`${logPrefix} ${chalk.green(`${pkg.name} published`)}`)
    return { name: pkg.name, success: true }
  } catch (err: unknown) {
    console.error(err)
    return {
      name: pkg.name,
      success: false,
      details:
        err instanceof Error
          ? `Error publishing ${pkg.name}: ${err.message}`
          : `Unexpected error publishing ${pkg.name}: ${JSON.stringify(err)}`,
    }
  }
}
/**
 * Print an error banner and terminate the process with the given exit code.
 */
function abort(message = 'Abort', exitCode = 1) {
  const banner = chalk.bold.red(`\n${message}\n`)
  console.error(banner)
  process.exit(exitCode)
}
/**
 * Prompt the user with a yes/no question (default: no).
 * Cancelling the prompt aborts the whole process.
 */
async function confirm(message: string): Promise<boolean> {
  const response = await prompts(
    {
      initial: false,
      message,
      name: 'confirm',
      type: 'confirm',
    },
    {
      onCancel: () => {
        abort()
      },
    },
  )
  return response.confirm
}
/**
 * Prompt the user for a free-text answer.
 * Cancelling the prompt aborts the whole process.
 */
async function question(message: string): Promise<string> {
  const response = await prompts(
    {
      message,
      name: 'value',
      type: 'text',
    },
    {
      onCancel: () => {
        abort()
      },
    },
  )
  return response.value
}
/**
 * Print a bold green section header.
 *
 * @param message Header text (a trailing newline is appended)
 * @param opts    enable — set to false to suppress the header (e.g. when the
 *                step it announces is skipped)
 */
function header(message: string, opts?: { enable?: boolean }) {
  // Bug fix: `enable` previously defaulted to undefined, so every call made
  // without opts (the common case throughout main()) silently printed
  // nothing. Headers are on by default; `{ enable: false }` opts out.
  const { enable = true } = opts ?? {}
  if (!enable) {
    return
  }
  console.log(chalk.bold.green(`${message}\n`))
}
/**
 * Outcome of a single `pnpm publish` attempt, collected into the final
 * results summary printed after all packages are processed.
 */
type PublishResult = {
  name: string
  success: boolean
  // Failure details (exit code / stderr); only populated when success is false
  details?: string
}

View File

@@ -1,37 +0,0 @@
/**
 * Inputs for creating a draft GitHub release via the REST API.
 */
type Args = {
  // Branch the release tag points at (sent as GitHub `target_commitish`)
  branch: string
  // Tag name — callers pass it with the `v` prefix, e.g. `v3.20.0`
  tag: string
  // Markdown body used as the release description
  releaseNotes: string
}
/**
 * Create a draft (non-prerelease) GitHub release for the given tag.
 *
 * Requires GITHUB_TOKEN in the environment.
 *
 * @returns releaseUrl — the html_url of the created draft release
 * @throws Error when the token is missing or the API responds non-2xx
 * @see https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#create-a-release
 */
export const createDraftGitHubRelease = async ({
  branch,
  tag,
  releaseNotes,
}: Args): Promise<{ releaseUrl: string }> => {
  // Fail fast with a clear message instead of an opaque 401 from the API
  if (!process.env.GITHUB_TOKEN) {
    throw new Error('GITHUB_TOKEN is required to create a GitHub release')
  }
  const res = await fetch(`https://api.github.com/repos/payloadcms/payload/releases`, {
    headers: {
      Accept: 'application/vnd.github.v3+json',
      Authorization: `token ${process.env.GITHUB_TOKEN}`,
      'Content-Type': 'application/json',
    },
    method: 'POST',
    body: JSON.stringify({
      tag_name: tag,
      target_commitish: branch,
      name: tag,
      body: releaseNotes,
      draft: true,
      prerelease: false,
      generate_release_notes: false,
    }),
  })
  if (!res.ok) {
    throw new Error(`Failed to create release: ${await res.text()}`)
  }
  // Narrow the response instead of leaking `any` from res.json()
  const resBody = (await res.json()) as { html_url: string }
  return { releaseUrl: resBody.html_url }
}

View File

@@ -1,346 +0,0 @@
import type { GitCommit } from 'changelogen'
import { execSync } from 'child_process'
import fse from 'fs-extra'
import minimist from 'minimist'
import open from 'open'
import semver from 'semver'
import { getLatestCommits } from './getLatestCommits.js'
import { getRecommendedBump } from './getRecommendedBump.js'
/**
 * Options for generateReleaseNotes.
 */
type Args = {
  // Git ref to diff from; defaults to the most recent reachable `v*` tag
  fromVersion?: string
  // Git ref to diff to; defaults to 'HEAD'
  toVersion?: string
  bump?: 'major' | 'minor' | 'patch' | 'prerelease'
  // NOTE(review): destructured but never read inside generateReleaseNotes — confirm intent
  dryRun?: boolean
  // Intended to open the prefilled GitHub release URL in the browser.
  // NOTE(review): the implementation currently opens the URL when this is
  // falsy (`if (!openReleaseUrl)`) — looks inverted; confirm.
  openReleaseUrl?: boolean
}
/**
 * Aggregated output of release-note generation.
 */
type ChangelogResult = {
  /**
   * URL to open releases/new with the changelog pre-filled
   */
  releaseUrl: string
  /**
   * The changelog content, does not include contributors
   */
  changelog: string
  /**
   * The release notes, includes contributors. This is the content used for the releaseUrl
   */
  releaseNotes: string
  /**
   * The release tag, includes prefix 'v'
   */
  releaseTag: string
}
/**
 * Build the changelog and release notes for the range fromVersion..toVersion.
 *
 * Groups conventional commits into emoji-headed sections, appends a
 * contributors section, and returns a prefilled GitHub "new release" URL.
 * Optionally opens that URL in the browser.
 */
export const generateReleaseNotes = async (args: Args = {}): Promise<ChangelogResult> => {
  // NOTE(review): dryRun is destructured but never read here — confirm intent
  const { toVersion = 'HEAD', dryRun, bump, openReleaseUrl } = args
  // Default to the most recent reachable v* tag
  const fromVersion =
    args.fromVersion || execSync('git describe --match "v*" --tags --abbrev=0').toString().trim()
  // Prerelease channel from the tag suffix (e.g. 'beta' from v3.0.0-beta.1)
  const tag = fromVersion.match(/-(\w+)\.\d+$/)?.[1] || 'latest'
  // Prereleases always bump as 'prerelease'; otherwise analyze the commits
  const recommendedBump =
    tag !== 'latest' ? 'prerelease' : await getRecommendedBump(fromVersion, toVersion)
  if (bump && bump !== recommendedBump) {
    console.log(`WARNING: Recommended bump is '${recommendedBump}', but you specified '${bump}'`)
  }
  const calculatedBump = bump || recommendedBump
  if (!calculatedBump) {
    throw new Error('Could not determine bump type')
  }
  const proposedReleaseVersion = 'v' + semver.inc(fromVersion, calculatedBump, undefined, tag)
  console.log(`Generating release notes for ${fromVersion} to ${toVersion}...`)
  console.log({
    tag,
    recommendedBump,
    fromVersion,
    toVersion,
    proposedVersion: proposedReleaseVersion,
  })
  const conventionalCommits = await getLatestCommits(fromVersion, toVersion)
  // Section order in the generated changelog
  const commitTypesForChangelog = [
    'feat',
    'fix',
    'perf',
    'refactor',
    'docs',
    'style',
    'test',
    'templates',
    'examples',
    'build',
    'ci',
    'chore',
    'breaking',
  ] as const
  type Sections = (typeof commitTypesForChangelog)[number]
  const emojiHeaderMap: Record<Sections, string> = {
    feat: '🚀 Features',
    fix: '🐛 Bug Fixes',
    perf: '⚡ Performance',
    refactor: '🛠 Refactors',
    docs: '📚 Documentation',
    style: '🎨 Styles',
    test: '🧪 Tests',
    templates: '📝 Templates',
    examples: '📓 Examples',
    build: '🔨 Build',
    ci: '⚙️ CI',
    chore: '🏡 Chores',
    breaking: '⚠️ BREAKING CHANGES',
  }
  // Bucket commits by section. A breaking commit is listed both under
  // 'breaking' and under its own type.
  const sections = conventionalCommits.reduce(
    (sections, c) => {
      if (c.isBreaking) {
        // Bug fix: the accumulator starts as {}, so the 'breaking' array must
        // be created before the first push (previously threw a TypeError on
        // the first breaking commit)
        if (!sections.breaking) {
          sections.breaking = []
        }
        sections.breaking.push(c)
      }
      if (commitTypesForChangelog.includes(c.type as Sections)) {
        if (!sections[c.type]) {
          sections[c.type] = []
        }
        sections[c.type].push(c)
      }
      return sections
    },
    {} as Record<'breaking' | Sections, GitCommit[]>,
  )
  // Sort commits by scope, unscoped first
  Object.values(sections).forEach((section) => {
    section.sort((a, b) => (a.scope || '').localeCompare(b.scope || ''))
  })
  const stringifiedSections = Object.fromEntries(
    Object.entries(sections).map(([key, commits]) => [
      key,
      commits.map((commit) => formatCommitForChangelog(commit, key === 'breaking')),
    ]),
  )
  // Fetch commits for fromVersion to toVersion
  const contributors = await createContributorSection(conventionalCommits)
  const yyyyMMdd = new Date().toISOString().split('T')[0]
  // Might need to swap out HEAD for the new proposed version
  let changelog = `## [${proposedReleaseVersion}](https://github.com/payloadcms/payload/compare/${fromVersion}...${proposedReleaseVersion}) (${yyyyMMdd})\n\n\n`
  for (const section of commitTypesForChangelog) {
    if (stringifiedSections[section]?.length) {
      changelog += `### ${emojiHeaderMap[section]}\n\n${stringifiedSections[section].join('\n')}\n\n`
    }
  }
  // Release notes = changelog + contributors (contributors are deliberately
  // kept out of the changelog itself)
  const releaseNotes = changelog + contributors
  let releaseUrl = `https://github.com/payloadcms/payload/releases/new?tag=${proposedReleaseVersion}&title=${proposedReleaseVersion}&body=${encodeURIComponent(releaseNotes)}`
  if (tag !== 'latest') {
    releaseUrl += `&prerelease=1`
  }
  // Bug fix: the condition was previously inverted (!openReleaseUrl), opening
  // the browser only when the caller asked NOT to
  if (openReleaseUrl) {
    await open(releaseUrl)
  }
  return {
    releaseUrl,
    changelog,
    releaseNotes,
    releaseTag: proposedReleaseVersion,
  }
}
// Helper functions
/**
 * Render the "Contributors" markdown section for the given commits.
 * Returns an empty string when no contributors could be resolved.
 */
async function createContributorSection(commits: GitCommit[]): Promise<string> {
  console.log('Fetching contributors...')
  const contributors = await getContributors(commits)
  if (!contributors.length) {
    return ''
  }
  const bullets = contributors.map((c) => `- ${c.name} (@${c.username})\n`)
  return `### 🤝 Contributors\n\n` + bullets.join('')
}
/**
 * Resolve GitHub usernames for commit authors and co-authors.
 *
 * For each commit, fetches the commit from the GitHub API to map the author
 * to a GitHub login, then scans the commit body for `Co-authored-by:`
 * trailers and resolves those emails via the user search API. Bots and
 * already-seen emails are skipped; failures are logged and skipped so one
 * bad lookup doesn't fail the whole run.
 */
async function getContributors(commits: GitCommit[]): Promise<Contributor[]> {
  const contributors: Contributor[] = []
  // Emails already attributed — used to dedupe across commits
  const emails = new Set<string>()
  const headers = {
    Accept: 'application/vnd.github.v3+json',
    Authorization: `token ${process.env.GITHUB_TOKEN}`,
  }
  for (const commit of commits) {
    console.log(`Fetching details for ${commit.message} - ${commit.shortHash}`)
    // Skip bots and authors we've already attributed
    if (emails.has(commit.author.email) || commit.author.name.includes('[bot]')) {
      continue
    }
    const res = await fetch(
      `https://api.github.com/repos/payloadcms/payload/commits/${commit.shortHash}`,
      {
        headers,
      },
    )
    if (!res.ok) {
      // Best-effort: log and move on rather than failing the whole run
      console.error(await res.text())
      console.log(`Failed to fetch commit: ${res.status} ${res.statusText}`)
      continue
    }
    const { author } = (await res.json()) as { author: { login: string; email: string } }
    if (!contributors.some((c) => c.username === author.login)) {
      contributors.push({ name: commit.author.name, username: author.login })
    }
    // NOTE(review): this records the API author email, while the dedupe check
    // above uses the git commit author email — these can differ; confirm.
    emails.add(author.email)
    // Check git commit for 'Co-authored-by:' lines
    const coAuthorPattern = /Co-authored-by: (?<name>[^<]+) <(?<email>[^>]+)>/g
    const coAuthors = Array.from(
      commit.body.matchAll(coAuthorPattern),
      (match) => match.groups,
    ).filter((e) => !e?.email.includes('[bot]')) as { name: string; email: string }[]
    if (!coAuthors.length) {
      continue
    }
    console.log(
      `Fetching co-author details for hash: ${commit.shortHash}. Co-authors:`,
      coAuthors.map((c) => c.email).join(', '),
    )
    // Attempt to resolve co-authors to GitHub users by email
    await Promise.all(
      (coAuthors || [])
        .map(async ({ name, email }) => {
          // Check if this co-author has already been added
          if (emails.has(email)) {
            return null
          }
          // Get co-author's GitHub username by email
          try {
            const response = await fetch(
              `https://api.github.com/search/users?q=${encodeURIComponent(email)}+in:email`,
              {
                headers,
              },
            )
            if (!response.ok) {
              console.log('Bad response from GitHub API fetching co-author by email')
              console.error(response.status)
              return null
            }
            const data = (await response.json()) as { items?: { login: string }[] }
            const user = data.items?.[0]
            if (!user) {
              return null
            }
            console.log(`Found co-author by email: ${user.login}`)
            if (!contributors.some((c) => c.username === user.login)) {
              contributors.push({ name, username: user.login })
            }
            emails.add(email)
            return user.login
          } catch (error) {
            console.log(`ERROR: Failed to fetch co-author by email`)
            console.error(error)
            return null
          }
        })
        // NOTE(review): this filter runs on an array of Promises (all truthy),
        // so it filters nothing — likely meant to filter the resolved values
        .filter(Boolean),
    )
  }
  return contributors
}
// Resolved GitHub identity for a commit author or co-author
type Contributor = { name: string; username: string }
/**
 * Render one conventional commit as a markdown changelog bullet.
 *
 * Produces `* **scope:** description` followed by PR and commit-hash links.
 * When the commit is breaking and `includeBreakingNotes` is set, the first
 * paragraph of the commit body is appended as indented notes.
 */
function formatCommitForChangelog(commit: GitCommit, includeBreakingNotes = false): string {
  const { scope, references, description, isBreaking } = commit
  let formatted = `* ${scope ? `**${scope}:** ` : ''}${description}`
  references.forEach((ref) => {
    if (ref.type === 'pull-request') {
      // /issues will redirect to /pulls if the issue is a PR
      formatted += ` ([${ref.value}](https://github.com/payloadcms/payload/issues/${ref.value.replace('#', '')}))`
    }
    if (ref.type === 'hash') {
      const shortHash = ref.value.slice(0, 7)
      formatted += ` ([${shortHash}](https://github.com/payloadcms/payload/commit/${shortHash}))`
    }
  })
  if (isBreaking && includeBreakingNotes) {
    // Breaking-change notes are the first paragraph of the commit body
    // (unused second destructure element removed)
    const [rawNotes] = commit.body.split('\n\n')
    let notes =
      ` ` +
      rawNotes
        .split('\n')
        .map((l) => `  ${l}`) // Indent notes
        .join('\n')
        .trim()
    // Remove random trailing quotes that sometimes appear
    if (notes.endsWith('"')) {
      notes = notes.slice(0, -1)
    }
    formatted += `\n\n${notes}\n\n`
  }
  return formatted
}
// Run as a CLI when this module is executed directly (not imported)
if (import.meta.url === `file://${process.argv[1]}`) {
  const cliArgs = minimist(process.argv.slice(2))
  generateReleaseNotes({
    bump: cliArgs.bump,
    fromVersion: cliArgs.fromVersion,
    toVersion: cliArgs.toVersion,
    dryRun: false,
    openReleaseUrl: cliArgs.openReleaseUrl,
  })
    .then(() => {
      console.log('Done')
    })
    .catch((err) => {
      console.error(err)
    })
}

View File

@@ -1,16 +0,0 @@
import type { ChangelogConfig } from 'changelogen'
import { determineSemverChange, getGitDiff, loadChangelogConfig, parseCommits } from 'changelogen'
/**
 * Parse the conventional commits in the range fromVersion..toVersion.
 * Falls back to a changelogen config for the payload repo when none is given.
 */
export async function getLatestCommits(
  fromVersion: string,
  toVersion: string,
  config?: ChangelogConfig,
) {
  const resolvedConfig =
    config ??
    (await loadChangelogConfig(process.cwd(), {
      repo: 'payloadcms/payload',
    }))
  const rawDiff = await getGitDiff(fromVersion, toVersion)
  return parseCommits(rawDiff, resolvedConfig)
}

View File

@@ -1,20 +0,0 @@
import type { ChangelogConfig } from 'changelogen'
import { determineSemverChange, getGitDiff, loadChangelogConfig, parseCommits } from 'changelogen'
import { getLatestCommits } from './getLatestCommits.js'
/**
 * Determine the semver bump recommended by the commits in
 * fromVersion..toVersion, using changelogen's analysis.
 */
export async function getRecommendedBump(
  fromVersion: string,
  toVersion: string,
  config?: ChangelogConfig,
) {
  const resolvedConfig =
    config ??
    (await loadChangelogConfig(process.cwd(), {
      repo: 'payloadcms/payload',
    }))
  const commits = await getLatestCommits(fromVersion, toVersion, resolvedConfig)
  const change = determineSemverChange(commits, resolvedConfig)
  // NOTE(review): 'major' is deliberately reported as 'minor' here — confirm
  // this downgrade matches the intended release policy
  return change === 'major' ? 'minor' : change
}