Linted
@@ -5,5 +5,6 @@
"printWidth": 120,
"trailingComma": "es5",
"arrowParens": "avoid",
"singleQuote": true
"singleQuote": true,
"useTabs": true
}
@@ -3,33 +3,30 @@
* If the action is called multiple times while it's still running, only the last call will be executed post completion.
* Handles function parameters, error logging, and maintains original return values.
*/
export function condenseAction<T extends (...args: any[]) => Promise<any>>(
fn: T
): T {
let actionInProgress = false;
let actionRequested = false;
export function condenseAction<T extends (...args: any[]) => Promise<any>>(fn: T): T {
let actionInProgress = false;
let actionRequested = false;

return new Proxy(fn, {
async apply(target, thisArg, args) {
if (actionInProgress) {
actionRequested = true;
return;
}
return new Proxy(fn, {
async apply(target, thisArg, args) {
if (actionInProgress) {
actionRequested = true;
return;
}

do {
actionInProgress = true;
actionRequested = false;
do {
actionInProgress = true;
actionRequested = false;

try {
const result = await Reflect.apply(target, thisArg, args);
return result;
} catch (err) {
console.error('Error in condensed action:', err);
} finally {
actionInProgress = false;
}

} while (actionRequested);
}
}) as T;
}
try {
const result = await Reflect.apply(target, thisArg, args);
return result;
} catch (err) {
console.error('Error in condensed action:', err);
} finally {
actionInProgress = false;
}
} while (actionRequested);
},
}) as T;
}
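Note: a minimal usage sketch of condenseAction (hypothetical caller, not part of this commit). Calls that arrive while the wrapped action is still running are collapsed into a single re-run once it finishes:

// Hypothetical example: wrap a slow async task so overlapping calls are condensed.
const syncToDisk = condenseAction(async (reason: string) => {
  console.log('exporting because:', reason);
  await new Promise(resolve => setTimeout(resolve, 100)); // simulate slow I/O
});

await Promise.all([
  syncToDisk('first change'),  // runs immediately
  syncToDisk('second change'), // returns early, flags a re-run
  syncToDisk('third change'),  // still only one re-run after the first call finishes
]);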
@@ -3,19 +3,22 @@ import path from 'path';
import { fileURLToPath } from 'url';

export async function copyConfig(force: boolean, { logger }: { logger: any }) {
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const srcDir = path.resolve(__dirname, '../install');
const targetDir = process.cwd();
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const srcDir = path.resolve(__dirname, '../install');
const targetDir = process.cwd();

// Test if it doesn't already exist then if it does exit
if (!force) {
await fs.access(path.resolve(targetDir, 'schema-sync')).then(() => {
logger.info('Config folder already exists, use --force to override');
process.exit(0);
}).catch(() => {
logger.info('Creating config folder...');
});
}
// Test if it doesn't already exist then if it does exit
if (!force) {
await fs
.access(path.resolve(targetDir, 'schema-sync'))
.then(() => {
logger.info('Config folder already exists, use --force to override');
process.exit(0);
})
.catch(() => {
logger.info('Creating config folder...');
});
}

await fs.cp(srcDir, targetDir, { recursive: true });
await fs.cp(srcDir, targetDir, { recursive: true });
}
@@ -1,10 +1,10 @@
/**
* This file contains the default configuration for the schema exporter.
*
*
* ONLY CHANGE THIS FILE IF YOU REALLY HAVE TO AND KNOW WHAT YOU ARE DOING!
*/

import { ExportCollectionConfig } from "./types";
import { ExportCollectionConfig } from './types';

export const syncDirectusCollections: ExportCollectionConfig = {
directus_collections: {
@@ -16,7 +16,7 @@ export const syncDirectusCollections: ExportCollectionConfig = {
directus_fields: {
watch: ['fields', 'collections'],
excludeFields: ['id'],
getKey: (o) => `${o.collection}-${o.field}`,
getKey: o => `${o.collection}-${o.field}`,
query: {
sort: ['collection', 'field'],
},
@@ -24,7 +24,7 @@ export const syncDirectusCollections: ExportCollectionConfig = {
directus_relations: {
watch: ['relations'],
excludeFields: ['id'],
getKey: (o) => `${o.many_collection}-${o.many_field}`,
getKey: o => `${o.many_collection}-${o.many_field}`,
query: {
sort: ['many_collection', 'many_field'],
},
@@ -46,7 +46,7 @@ export const syncDirectusCollections: ExportCollectionConfig = {
directus_permissions: {
watch: ['permissions', 'collections', 'fields'],
excludeFields: ['id'],
getKey: (o) => `${o.role ?? 'public'}-${o.collection}--${o.action}`,
getKey: o => `${o.role ?? 'public'}-${o.collection}--${o.action}`,
query: {
sort: ['role', 'collection', 'action'],
},
@@ -75,7 +75,7 @@ export const syncDirectusCollections: ExportCollectionConfig = {
directus_translations: {
watch: ['translations'],
excludeFields: ['id'],
getKey: (o) => `${o.key}-${o.language}`,
getKey: o => `${o.key}-${o.language}`,
query: {
sort: ['key', 'language'],
},
@@ -84,4 +84,4 @@ export const syncDirectusCollections: ExportCollectionConfig = {
watch: ['webhooks'],
excludeFields: ['url'],
},
};
};
@@ -4,64 +4,61 @@ import { CollectionExporter } from './collectionExporter.js';
import { ExportCollectionConfig, IExporterConfig, IGetItemsService } from './types';

export class ExportManager {
protected exporters: IExporterConfig[] = [];
protected exporters: IExporterConfig[] = [];

constructor(protected logger: ApiExtensionContext['logger']) {}
constructor(protected logger: ApiExtensionContext['logger']) {}

// FIRST: Add exporters
public addExporter(exporterConfig: IExporterConfig) {
this.exporters.push(exporterConfig);
}
// FIRST: Add exporters
public addExporter(exporterConfig: IExporterConfig) {
this.exporters.push(exporterConfig);
}

public addCollectionExporter(
config: ExportCollectionConfig,
getItemsService: IGetItemsService
) {
for (let collectionName in config) {
const opts = config[collectionName]!;
this.exporters.push({
watch: opts.watch,
exporter: new CollectionExporter(collectionName, getItemsService, opts, this.logger),
});
}
}
public addCollectionExporter(config: ExportCollectionConfig, getItemsService: IGetItemsService) {
for (let collectionName in config) {
const opts = config[collectionName]!;
this.exporters.push({
watch: opts.watch,
exporter: new CollectionExporter(collectionName, getItemsService, opts, this.logger),
});
}
}

// SECOND: Import if needed
public async loadAll(merge = false) {
await this._loadNextExporter(0, merge);
}
// SECOND: Import if needed
public async loadAll(merge = false) {
await this._loadNextExporter(0, merge);
}

protected async _loadNextExporter(i = 0, merge = false) {
if (i >= this.exporters.length) return;
protected async _loadNextExporter(i = 0, merge = false) {
if (i >= this.exporters.length) return;

try {
const finishUp = await this.exporters[i]!.exporter.load(merge);
await this._loadNextExporter(i + 1, merge);
if (finishUp) await finishUp();
} catch (e) {
this.logger.error(`Failed loading "${this.exporters[i]!.exporter.name}".`);
throw e;
}
}
try {
const finishUp = await this.exporters[i]!.exporter.load(merge);
await this._loadNextExporter(i + 1, merge);
if (finishUp) await finishUp();
} catch (e) {
this.logger.error(`Failed loading "${this.exporters[i]!.exporter.name}".`);
throw e;
}
}

// THIRD: Start watching for changes
public attachAllWatchers(action: (event: string, handler: ActionHandler) => void, updateMeta: () => Promise<void>) {
// EXPORT SCHEMAS & COLLECTIONS ON CHANGE //
const actions = ['create', 'update', 'delete'];
this.exporters.forEach(({ watch, exporter }) => {
watch.forEach(col => {
actions.forEach(evt => {
action(`${col}.${evt}`, async () => {
await exporter.export();
await updateMeta();
});
});
});
});
}
// THIRD: Start watching for changes
public attachAllWatchers(action: (event: string, handler: ActionHandler) => void, updateMeta: () => Promise<void>) {
// EXPORT SCHEMAS & COLLECTIONS ON CHANGE //
const actions = ['create', 'update', 'delete'];
this.exporters.forEach(({ watch, exporter }) => {
watch.forEach(col => {
actions.forEach(evt => {
action(`${col}.${evt}`, async () => {
await exporter.export();
await updateMeta();
});
});
});
});
}

public async exportAll() {
console.log('Exporting ', this.exporters.length, ' exporters');
await Promise.all(this.exporters.map(e => e.exporter.export()));
}
public async exportAll() {
console.log('Exporting ', this.exporters.length, ' exporters');
await Promise.all(this.exporters.map(e => e.exporter.export()));
}
}
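Note: as the FIRST/SECOND/THIRD comments indicate, ExportManager is wired up in three phases. A condensed sketch (hypothetical wiring; src/index.ts below does the same with its own logger, config and item-service factory):

const manager = new ExportManager(logger);
// FIRST: register what should be exported and which collections to watch
manager.addCollectionExporter(syncDirectusCollections, getItemsService);
// SECOND: import previously exported data if needed
await manager.loadAll();
// THIRD: re-export (and refresh the hash meta) whenever a watched collection changes
manager.attachAllWatchers(action, updateMeta);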
src/index.ts
@@ -8,202 +8,206 @@ import { UpdateManager } from './updateManager';
import { ADMIN_ACCOUNTABILITY, ExportHelper, nodeImport } from './utils';

const registerHook: HookConfig = async ({ action, init }, { env, services, database, getSchema, logger }) => {
const { SchemaService, ItemsService } = services;
const { SchemaService, ItemsService } = services;

const schemaOptions = {
split: typeof env.SCHEMA_SYNC_SPLIT === 'boolean' ? env.SCHEMA_SYNC_SPLIT : true,
}
const schemaOptions = {
split: typeof env.SCHEMA_SYNC_SPLIT === 'boolean' ? env.SCHEMA_SYNC_SPLIT : true,
};

let schema: SchemaOverview | null;
const getAdminSchema = async () =>
schema ||
(schema = await getSchema({
accountability: ADMIN_ACCOUNTABILITY,
database,
}));
const clearAdminSchema = () => (schema = null);
const getSchemaService = () =>
new SchemaService({
knex: database,
accountability: ADMIN_ACCOUNTABILITY,
});
const getItemsService: IGetItemsService = async (collectionName: string) =>
new ItemsService(collectionName, {
schema: await getAdminSchema(),
accountability: ADMIN_ACCOUNTABILITY,
knex: database,
}) as ItemsService;
let schema: SchemaOverview | null;
const getAdminSchema = async () =>
schema ||
(schema = await getSchema({
accountability: ADMIN_ACCOUNTABILITY,
database,
}));
const clearAdminSchema = () => (schema = null);
const getSchemaService = () =>
new SchemaService({
knex: database,
accountability: ADMIN_ACCOUNTABILITY,
});
const getItemsService: IGetItemsService = async (collectionName: string) =>
new ItemsService(collectionName, {
schema: await getAdminSchema(),
accountability: ADMIN_ACCOUNTABILITY,
knex: database,
}) as ItemsService;

const updateManager = new UpdateManager(database);
const updateManager = new UpdateManager(database);

// We need to do this in async in order to load the config files
let _exportManager: ExportManager;
const exportManager = async () => {
if (!_exportManager) {
_exportManager = new ExportManager(logger);
// We need to do this in async in order to load the config files
let _exportManager: ExportManager;
const exportManager = async () => {
if (!_exportManager) {
_exportManager = new ExportManager(logger);

_exportManager.addExporter({
watch: ['collections', 'fields', 'relations'],
exporter: new SchemaExporter(getSchemaService, logger, schemaOptions),
});
_exportManager.addExporter({
watch: ['collections', 'fields', 'relations'],
exporter: new SchemaExporter(getSchemaService, logger, schemaOptions),
});

const { syncDirectusCollections } = (await nodeImport(ExportHelper.schemaDir, 'directus_config.js')) as {
syncDirectusCollections: ExportCollectionConfig;
};
const { syncCustomCollections } = (await nodeImport(ExportHelper.schemaDir, 'config.js')) as {
syncCustomCollections: ExportCollectionConfig;
};
_exportManager.addCollectionExporter(syncDirectusCollections, getItemsService);
_exportManager.addCollectionExporter(syncCustomCollections, getItemsService);
const { syncDirectusCollections } = (await nodeImport(ExportHelper.schemaDir, 'directus_config.js')) as {
syncDirectusCollections: ExportCollectionConfig;
};
const { syncCustomCollections } = (await nodeImport(ExportHelper.schemaDir, 'config.js')) as {
syncCustomCollections: ExportCollectionConfig;
};
_exportManager.addCollectionExporter(syncDirectusCollections, getItemsService);
_exportManager.addCollectionExporter(syncCustomCollections, getItemsService);

// Additional config
if (env.SCHEMA_SYNC_CONFIG) {
const { syncCustomCollections } = (await nodeImport(ExportHelper.schemaDir, env.SCHEMA_SYNC_CONFIG)) as {
syncCustomCollections: ExportCollectionConfig;
};
if (syncCustomCollections) {
_exportManager.addCollectionExporter(syncCustomCollections, getItemsService);
} else {
logger.warn(`Additonal config specified but not exporting "syncCustomCollections"`);
}
}
}
// Additional config
if (env.SCHEMA_SYNC_CONFIG) {
const { syncCustomCollections } = (await nodeImport(ExportHelper.schemaDir, env.SCHEMA_SYNC_CONFIG)) as {
syncCustomCollections: ExportCollectionConfig;
};
if (syncCustomCollections) {
_exportManager.addCollectionExporter(syncCustomCollections, getItemsService);
} else {
logger.warn(`Additonal config specified but not exporting "syncCustomCollections"`);
}
}
}

return _exportManager;
};
return _exportManager;
};

const updateMeta = condenseAction(async (saveToDb = true) => {
const meta = await ExportHelper.updateExportMeta();
if (saveToDb && meta && (await updateManager.lockForUpdates(meta.hash, meta.ts))) {
await updateManager.commitUpdates();
}
});
const updateMeta = condenseAction(async (saveToDb = true) => {
const meta = await ExportHelper.updateExportMeta();
if (saveToDb && meta && (await updateManager.lockForUpdates(meta.hash, meta.ts))) {
await updateManager.commitUpdates();
}
});

function attachExporters() {
if (env.SCHEMA_SYNC === 'BOTH' || env.SCHEMA_SYNC === 'EXPORT') {
exportManager().then(expMng => expMng.attachAllWatchers(action, updateMeta));
}
}
function attachExporters() {
if (env.SCHEMA_SYNC === 'BOTH' || env.SCHEMA_SYNC === 'EXPORT') {
exportManager().then(expMng => expMng.attachAllWatchers(action, updateMeta));
}
}

// LOAD EXPORTED SCHEMAS & COLLECTIONS
if (env.SCHEMA_SYNC === 'BOTH' || env.SCHEMA_SYNC === 'IMPORT') {
init('app.before', async () => {
try {
const meta = await ExportHelper.getExportMeta();
if (!meta) return logger.info('Nothing exported yet it seems');
if (!(await updateManager.lockForUpdates(meta.hash, meta.ts))) return; // Schema is locked / no change, nothing to do
// LOAD EXPORTED SCHEMAS & COLLECTIONS
if (env.SCHEMA_SYNC === 'BOTH' || env.SCHEMA_SYNC === 'IMPORT') {
init('app.before', async () => {
try {
const meta = await ExportHelper.getExportMeta();
if (!meta) return logger.info('Nothing exported yet it seems');
if (!(await updateManager.lockForUpdates(meta.hash, meta.ts))) return; // Schema is locked / no change, nothing to do

logger.info(`Updating schema and data with hash: ${meta.hash}`);
const expMng = await exportManager();
await expMng.loadAll();
logger.info(`Updating schema and data with hash: ${meta.hash}`);
const expMng = await exportManager();
await expMng.loadAll();

await updateManager.commitUpdates();
clearAdminSchema();
} catch (e) {
logger.error(e);
logger.info('Releasing lock...');
await updateManager.releaseLock();
} finally {
await attachExporters();
}
});
} else {
attachExporters();
}
await updateManager.commitUpdates();
clearAdminSchema();
} catch (e) {
logger.error(e);
logger.info('Releasing lock...');
await updateManager.releaseLock();
} finally {
await attachExporters();
}
});
} else {
attachExporters();
}

init('cli.before', async ({ program }) => {
const dbCommand = program.command('schema-sync');
init('cli.before', async ({ program }) => {
const dbCommand = program.command('schema-sync');

dbCommand
.command('export-schema')
.description('Export only the schema file')
.option('-S, --split', `Splits the schema file into multiple files per collection`)
.action(async (args: { split: boolean }) => {
logger.info('Exporting schema...');
const exportSchema = new SchemaExporter(getSchemaService, logger, args && 'split' in args ? args : schemaOptions);
await exportSchema.export();
dbCommand
.command('export-schema')
.description('Export only the schema file')
.option('-S, --split', `Splits the schema file into multiple files per collection`)
.action(async (args: { split: boolean }) => {
logger.info('Exporting schema...');
const exportSchema = new SchemaExporter(
getSchemaService,
logger,
args && 'split' in args ? args : schemaOptions
);
await exportSchema.export();

await updateMeta();
await updateMeta();

logger.info('Done!');
process.exit(0);
});
logger.info('Done!');
process.exit(0);
});

dbCommand
.command('import-schema')
.description('Import only the schema file')
.action(async () => {
logger.info('Importing schema...');
const meta = await ExportHelper.getExportMeta();
if (!meta) return logger.info('Nothing exported yet it seems');
dbCommand
.command('import-schema')
.description('Import only the schema file')
.action(async () => {
logger.info('Importing schema...');
const meta = await ExportHelper.getExportMeta();
if (!meta) return logger.info('Nothing exported yet it seems');

const exportSchema = new SchemaExporter(getSchemaService, logger, schemaOptions);
await exportSchema.load();
const exportSchema = new SchemaExporter(getSchemaService, logger, schemaOptions);
await exportSchema.load();

await updateManager.forceCommitUpdates(meta.hash, meta.ts);
logger.info('Done!');
process.exit(0);
});
await updateManager.forceCommitUpdates(meta.hash, meta.ts);
logger.info('Done!');
process.exit(0);
});

dbCommand
.command('install')
.description('Ensures the DB is ready for schema sync, and creates the schema-sync config folder')
.option('--force', 'Override schema-sync config folder')
.action(async ({ force }: { force: boolean }) => {
logger.info('Installing Schema sync...');
await updateManager.ensureInstalled();
await copyConfig(force, { logger });
dbCommand
.command('install')
.description('Ensures the DB is ready for schema sync, and creates the schema-sync config folder')
.option('--force', 'Override schema-sync config folder')
.action(async ({ force }: { force: boolean }) => {
logger.info('Installing Schema sync...');
await updateManager.ensureInstalled();
await copyConfig(force, { logger });

logger.info('Done!');
process.exit(0);
});
logger.info('Done!');
process.exit(0);
});

dbCommand
.command('hash')
.description('Recalculate the hash for all the data files')
.action(async () => {
await updateMeta(false);
logger.info('Done!');
process.exit(0);
});
dbCommand
.command('hash')
.description('Recalculate the hash for all the data files')
.action(async () => {
await updateMeta(false);
logger.info('Done!');
process.exit(0);
});

dbCommand
.command('import')
.description('Import the schema and all available data from file to DB.')
.option('--merge', 'Only upsert data and not delete')
.action(async ({ merge }: { merge: boolean }) => {
try {
logger.info(`Importing everything from: ${ExportHelper.dataDir}`);
const expMng = await exportManager();
await expMng.loadAll(merge);
dbCommand
.command('import')
.description('Import the schema and all available data from file to DB.')
.option('--merge', 'Only upsert data and not delete')
.action(async ({ merge }: { merge: boolean }) => {
try {
logger.info(`Importing everything from: ${ExportHelper.dataDir}`);
const expMng = await exportManager();
await expMng.loadAll(merge);

logger.info('Done!');
process.exit(0);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
});
logger.info('Done!');
process.exit(0);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
});

dbCommand
.command('export')
.description('Export the schema and all data as configured from DB to file')
.action(async () => {
try {
logger.info(`Exporting everything to: ${ExportHelper.dataDir}`);
const expMng = await exportManager();
await expMng.exportAll();
dbCommand
.command('export')
.description('Export the schema and all data as configured from DB to file')
.action(async () => {
try {
logger.info(`Exporting everything to: ${ExportHelper.dataDir}`);
const expMng = await exportManager();
await expMng.exportAll();

await updateMeta();
await updateMeta();

logger.info('Done!');
process.exit(0);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
});
});
logger.info('Done!');
process.exit(0);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
});
});
};

export default registerHook;
@@ -14,16 +14,20 @@ export class SchemaExporter implements IExporter {
protected _exportHandler = condenseAction(() => this.createAndSaveSnapshot());

// Directus SchemaService, database and getSchema
constructor(getSchemaService: () => any, protected logger: ApiExtensionContext['logger'], protected options = { split: true }) {
constructor(
getSchemaService: () => any,
protected logger: ApiExtensionContext['logger'],
protected options = { split: true }
) {
this._getSchemaService = () => getSchemaService();
this._filePath = `${ExportHelper.dataDir}/schema.json`
this._filePath = `${ExportHelper.dataDir}/schema.json`;
}

protected ensureSchemaFilesDir = async () => {
if (!await ExportHelper.fileExists(`${ExportHelper.dataDir}/schema`)) {
if (!(await ExportHelper.fileExists(`${ExportHelper.dataDir}/schema`))) {
await mkdir(`${ExportHelper.dataDir}/schema`, { recursive: true });
}
}
};

protected schemaFilesPath(collection: string) {
return `${ExportHelper.dataDir}/schema/${collection}.json`;
@@ -45,7 +49,11 @@ export class SchemaExporter implements IExporter {
if (json) {
const schemaParsed = JSON.parse(json);
// For older versions, the snapshot was stored under the key `snapshot`
const { partial, hash, ...snapshot } = ((schemaParsed as any).snapshot ? Object.assign((schemaParsed as any).snapshot, { hash: schemaParsed.hash }) : schemaParsed) as Snapshot & { partial?: boolean, hash: string };
const { partial, hash, ...snapshot } = (
(schemaParsed as any).snapshot
? Object.assign((schemaParsed as any).snapshot, { hash: schemaParsed.hash })
: schemaParsed
) as Snapshot & { partial?: boolean; hash: string };

if (partial) {
snapshot.collections = [];
@@ -56,7 +64,10 @@ export class SchemaExporter implements IExporter {
const files = await glob(this.schemaFilesPath('*'));
for (const file of files) {
const collectionJson = await readFile(file, { encoding: 'utf8' });
const { fields, relations, ...collectionInfo } = JSON.parse(collectionJson) as Collection & { fields: SnapshotField[], relations: SnapshotRelation[] };
const { fields, relations, ...collectionInfo } = JSON.parse(collectionJson) as Collection & {
fields: SnapshotField[];
relations: SnapshotRelation[];
};
++found;

// Only add collection if it has a meta definition (actual table or group)
@@ -100,14 +111,14 @@ export class SchemaExporter implements IExporter {
}
}
}
}
};

/**
* Create and save the schema snapshot to file
*/
protected createAndSaveSnapshot = async () => {
const svc = this._getSchemaService();
let snapshot = await svc.snapshot() as Snapshot;
let snapshot = (await svc.snapshot()) as Snapshot;
snapshot = exportHook(snapshot);
let hash = svc.getHashedSnapshot(snapshot).hash;

@@ -120,10 +131,12 @@ export class SchemaExporter implements IExporter {
relations.sort((a, b) => a.field.localeCompare(b.field));

// Sort relations also by related_collection
relations.sort((a, b) => a.related_collection && b.related_collection ? a.related_collection.localeCompare(b.related_collection) : 0);
relations.sort((a, b) =>
a.related_collection && b.related_collection ? a.related_collection.localeCompare(b.related_collection) : 0
);

const map: Record<string, any> = {};
collections.forEach((item) => {
collections.forEach(item => {
map[item.collection] = item;
map[item.collection].fields = [] as SnapshotField[];
map[item.collection].relations = [] as SnapshotRelation[];
@@ -142,7 +155,7 @@ export class SchemaExporter implements IExporter {
if (!map[collection]) {
map[collection] = { collection, fields: [], relations: [] };
}

map[collection].relations.push(relationMeta);
}

@@ -159,5 +172,5 @@ export class SchemaExporter implements IExporter {
const schemaJson = JSON.stringify(Object.assign({ hash }, snapshot), null, 2);
await writeFile(this._filePath, schemaJson);
}
}
};
}
@@ -1,17 +1,17 @@
import * as pgUtils from './dialects/postgres/utils.js';

const modifiers: modifiersType = {
postgres: [pgUtils.sequenceToSerialType]
}

export function exportHook<T extends Record<string, any>>(snapshot: T) {
if (modifiers[snapshot.vendor]?.length)
return modifiers[snapshot.vendor]!.reduce((_snapshot, modifier) => {
return modifier(_snapshot);
}, snapshot)
return snapshot;
postgres: [pgUtils.sequenceToSerialType],
};

type modifiersType = Record<string, snapshotFunctionType[]>
export function exportHook<T extends Record<string, any>>(snapshot: T) {
if (modifiers[snapshot.vendor]?.length)
return modifiers[snapshot.vendor]!.reduce((_snapshot, modifier) => {
return modifier(_snapshot);
}, snapshot);
return snapshot;
}

type snapshotFunctionType = <T extends Record<string, any>>(snapshotWithHash: T) => T
type modifiersType = Record<string, snapshotFunctionType[]>;

type snapshotFunctionType = <T extends Record<string, any>>(snapshotWithHash: T) => T;
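Note: exportHook just threads the snapshot through every modifier registered for snapshot.vendor. A hedged sketch of what a modifier of type snapshotFunctionType could look like (hypothetical; the real sequenceToSerialType lives in ./dialects/postgres/utils.js):

// Hypothetical modifier: same shape as snapshotFunctionType above,
// i.e. it takes the snapshot and returns a (possibly rewritten) snapshot.
const stripFieldComments: snapshotFunctionType = snapshot => {
  for (const field of snapshot.fields ?? []) {
    // assumption: snapshot fields carry a schema.comment property
    if (field.schema?.comment) field.schema.comment = null;
  }
  return snapshot;
};

// Registering it for a vendor means adding it to that vendor's list, e.g.
// modifiers.postgres.push(stripFieldComments);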
src/types.ts
@@ -39,18 +39,18 @@ export type CollectionExporterOptions = {
getKey?: (o: Item) => PrimaryKey;

// Specify additional query options to filter, sort and limit the exported items
query?: Pick<Query, 'filter'|'sort'|'limit'>;
query?: Pick<Query, 'filter' | 'sort' | 'limit'>;

// Prefix to add to the exported file name
prefix?: string;
onExport?: (item: Item, srv: ItemsService) => Promise<Item | null>;
onImport?: (item: Item, srv: ItemsService) => Promise<Item | null>;
}
};

export type ToUpdateItemDiff = {
pkey: PrimaryKey
diff: any
}
pkey: PrimaryKey;
diff: any;
};

//
// Defining used Directus types here in order to get type hinting without installing entire Directus
@@ -2,108 +2,109 @@ import type { Knex } from 'knex';
import { ExportHelper } from './utils';

export class UpdateManager {
protected db: Knex;
protected tableName = 'directus_settings';
protected rowId = 1;
protected db: Knex;
protected tableName = 'directus_settings';
protected rowId = 1;

protected _locking = false;
protected _locked:
| {
hash: string;
ts: string;
}
| false = false;
protected _locking = false;
protected _locked:
| {
hash: string;
ts: string;
}
| false = false;

constructor(database: Knex) {
this.db = database;
}
constructor(database: Knex) {
this.db = database;
}

/**
* Acquire the lock to make updates
* @param newHash - New hash value of latest changes
* @param isoTS - ISO timestamp
* @returns
*/
public async lockForUpdates(newHash: string, isoTS: string) {
if (this._locked || this._locking) return false;
this._locking = true;
/**
* Acquire the lock to make updates
* @param newHash - New hash value of latest changes
* @param isoTS - ISO timestamp
* @returns
*/
public async lockForUpdates(newHash: string, isoTS: string) {
if (this._locked || this._locking) return false;
this._locking = true;

const succeeded = await this.db.transaction(async trx => {
const rows = await trx(this.tableName)
.select('*')
.where('id', this.rowId)
.where('mv_locked', false)
// Only need to migrate if hash is different
.andWhereNot('mv_hash', newHash)
// And only if the previous hash is older than the current one
.andWhere('mv_ts', '<', isoTS).orWhereNull('mv_ts')
.forUpdate(); // This locks the row
const succeeded = await this.db.transaction(async trx => {
const rows = await trx(this.tableName)
.select('*')
.where('id', this.rowId)
.where('mv_locked', false)
// Only need to migrate if hash is different
.andWhereNot('mv_hash', newHash)
// And only if the previous hash is older than the current one
.andWhere('mv_ts', '<', isoTS)
.orWhereNull('mv_ts')
.forUpdate(); // This locks the row

// If row is found, lock it
if (rows.length) {
await trx(this.tableName).where('id', this.rowId).update({
mv_locked: true,
});
this._locked = {
hash: newHash,
ts: isoTS,
};
return true;
}
// If row is found, lock it
if (rows.length) {
await trx(this.tableName).where('id', this.rowId).update({
mv_locked: true,
});
this._locked = {
hash: newHash,
ts: isoTS,
};
return true;
}

return false;
});
return false;
});

this._locking = false;
return succeeded;
}
this._locking = false;
return succeeded;
}

public async commitUpdates() {
if (!this._locked) return false;
public async commitUpdates() {
if (!this._locked) return false;

await this.db(this.tableName).where('id', this.rowId).update({
mv_hash: this._locked.hash,
mv_ts: this._locked.ts,
mv_locked: false,
});
await this.db(this.tableName).where('id', this.rowId).update({
mv_hash: this._locked.hash,
mv_ts: this._locked.ts,
mv_locked: false,
});

this._locked = false;
return true;
}
this._locked = false;
return true;
}

public async forceCommitUpdates(newHash: string, isoTS: string) {
await this.db(this.tableName).where('id', this.rowId).update({
mv_hash: newHash,
mv_ts: isoTS,
mv_locked: false,
});
public async forceCommitUpdates(newHash: string, isoTS: string) {
await this.db(this.tableName).where('id', this.rowId).update({
mv_hash: newHash,
mv_ts: isoTS,
mv_locked: false,
});

this._locked = false;
return true;
}
this._locked = false;
return true;
}

public async releaseLock() {
if (!this._locked) return false;
public async releaseLock() {
if (!this._locked) return false;

await this.db(this.tableName).where('id', this.rowId).update({
mv_locked: false,
});
await this.db(this.tableName).where('id', this.rowId).update({
mv_locked: false,
});

this._locked = false;
return true;
}
this._locked = false;
return true;
}

public async ensureInstalled() {
const tableName = 'directus_settings';
public async ensureInstalled() {
const tableName = 'directus_settings';

const isInstalled = await this.db.schema.hasColumn(tableName, 'mv_hash');
const isInstalled = await this.db.schema.hasColumn(tableName, 'mv_hash');

if (!isInstalled) {
await this.db.schema.table(tableName, table => {
table.string('mv_hash').defaultTo('').notNullable();
table.timestamp('mv_ts', { useTz: true }).defaultTo('2020-01-01').notNullable();
table.boolean('mv_locked').defaultTo(false).notNullable();
});
}
}
if (!isInstalled) {
await this.db.schema.table(tableName, table => {
table.string('mv_hash').defaultTo('').notNullable();
table.timestamp('mv_ts', { useTz: true }).defaultTo('2020-01-01').notNullable();
table.boolean('mv_locked').defaultTo(false).notNullable();
});
}
}
}
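Note: a compact sketch of the lock lifecycle these methods implement (hypothetical caller; src/index.ts above follows the same pattern around schema imports):

const updateManager = new UpdateManager(database);
// Only proceed when this instance wins the row lock in directus_settings.
if (await updateManager.lockForUpdates(meta.hash, meta.ts)) {
  try {
    // ... apply the exported schema and data here ...
    await updateManager.commitUpdates(); // records mv_hash / mv_ts and unlocks
  } catch (err) {
    await updateManager.releaseLock(); // unlock without recording the new hash
    throw err;
  }
}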
@@ -1,104 +1,104 @@
import assert from "node:assert";
import { describe, it } from "node:test";
import { deepEqual, getDiff, sortObject } from "./utils.js";
import assert from 'node:assert';
import { describe, it } from 'node:test';
import { deepEqual, getDiff, sortObject } from './utils.js';

describe('sortObject', () => {
it('should sort object keys alphabetically', () => {
const input = { c: 1, a: 2, b: 3 };
const assertedOutput = { a: 2, b: 3, c: 1 };
assert.deepStrictEqual(sortObject(input), assertedOutput);
});
it('should sort object keys alphabetically', () => {
const input = { c: 1, a: 2, b: 3 };
const assertedOutput = { a: 2, b: 3, c: 1 };
assert.deepStrictEqual(sortObject(input), assertedOutput);
});

it('should sort nested object keys alphabetically', () => {
const input = { c: 1, a: { d: 4, b: 3 }, e: 5 };
const assertedOutput = { a: { b: 3, d: 4 }, c: 1, e: 5 };
assert.deepStrictEqual(sortObject(input), assertedOutput);
});
it('should sort nested object keys alphabetically', () => {
const input = { c: 1, a: { d: 4, b: 3 }, e: 5 };
const assertedOutput = { a: { b: 3, d: 4 }, c: 1, e: 5 };
assert.deepStrictEqual(sortObject(input), assertedOutput);
});

it('should sort array elements recursively', () => {
const input = [{ c: 1, a: 2 }, { b: 3 }];
const assertedOutput = [{ a: 2, c: 1 }, { b: 3 }];
assert.deepStrictEqual(sortObject(input), assertedOutput);
});
it('should sort array elements recursively', () => {
const input = [{ c: 1, a: 2 }, { b: 3 }];
const assertedOutput = [{ a: 2, c: 1 }, { b: 3 }];
assert.deepStrictEqual(sortObject(input), assertedOutput);
});

it('should return input if it is not an object', () => {
assert.deepStrictEqual(sortObject(null as any), null);
assert.deepStrictEqual(sortObject(42 as any), 42);
assert.deepStrictEqual(sortObject('hello' as any), 'hello');
});
it('should return input if it is not an object', () => {
assert.deepStrictEqual(sortObject(null as any), null);
assert.deepStrictEqual(sortObject(42 as any), 42);
assert.deepStrictEqual(sortObject('hello' as any), 'hello');
});
});

describe('getDiff', () => {
it('should return the entire new object if the old object is null', () => {
const newObj = { a: 1, b: 2 };
const oldObj = null;
const assertedOutput = { a: 1, b: 2 };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});
it('should return the entire new object if the old object is null', () => {
const newObj = { a: 1, b: 2 };
const oldObj = null;
const assertedOutput = { a: 1, b: 2 };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});

it('should return null if the new and old objects are equal', () => {
const newObj = { a: 1, b: 2 };
const oldObj = { a: 1, b: 2 };
assert.deepStrictEqual(getDiff(newObj, oldObj), null);
});
it('should return null if the new and old objects are equal', () => {
const newObj = { a: 1, b: 2 };
const oldObj = { a: 1, b: 2 };
assert.deepStrictEqual(getDiff(newObj, oldObj), null);
});

it('should return only the different properties between the new and old objects', () => {
const newObj = { a: 1, b: 2, c: 3 };
const oldObj = { a: 1, b: 3, d: 4 };
const assertedOutput = { b: 2, c: 3 };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});
it('should return only the different properties between the new and old objects', () => {
const newObj = { a: 1, b: 2, c: 3 };
const oldObj = { a: 1, b: 3, d: 4 };
const assertedOutput = { b: 2, c: 3 };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});

it('should handle nested objects', () => {
const newObj = { a: 1, b: { c: 2, d: 3 } };
const oldObj = { a: 1, b: { c: 2, d: 4 } };
const assertedOutput = { b: { d: 3 } };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});
it('should handle nested objects', () => {
const newObj = { a: 1, b: { c: 2, d: 3 } };
const oldObj = { a: 1, b: { c: 2, d: 4 } };
const assertedOutput = { b: { d: 3 } };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});

it('should handle arrays', () => {
const newObj = { a: 1, b: [1, 2, 3] };
const oldObj = { a: 1, b: [1, 2, 4] };
const assertedOutput = { b: [1, 2, 3] };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});
it('should handle arrays', () => {
const newObj = { a: 1, b: [1, 2, 3] };
const oldObj = { a: 1, b: [1, 2, 4] };
const assertedOutput = { b: [1, 2, 3] };
assert.deepStrictEqual(getDiff(newObj, oldObj), assertedOutput);
});
});

describe('deepEqual', () => {
it('should return true for equal objects', () => {
const obj1 = { a: 1, b: { c: 2 } };
const obj2 = { a: 1, b: { c: 2 } };
assert.strictEqual(deepEqual(obj1, obj2), true);
});
it('should return true for equal objects', () => {
const obj1 = { a: 1, b: { c: 2 } };
const obj2 = { a: 1, b: { c: 2 } };
assert.strictEqual(deepEqual(obj1, obj2), true);
});

it('should return false for different objects', () => {
const obj1 = { a: 1, b: { c: 2 } };
const obj2 = { a: 1, b: { c: 3 } };
assert.strictEqual(deepEqual(obj1, obj2), false);
});
it('should return false for different objects', () => {
const obj1 = { a: 1, b: { c: 2 } };
const obj2 = { a: 1, b: { c: 3 } };
assert.strictEqual(deepEqual(obj1, obj2), false);
});

it('should return true for equal arrays', () => {
const arr1 = [1, 2, { a: 3 }];
const arr2 = [1, 2, { a: 3 }];
assert.strictEqual(deepEqual(arr1, arr2), true);
});
it('should return true for equal arrays', () => {
const arr1 = [1, 2, { a: 3 }];
const arr2 = [1, 2, { a: 3 }];
assert.strictEqual(deepEqual(arr1, arr2), true);
});

it('should return false for different arrays', () => {
const arr1 = [1, 2, { a: 3 }];
const arr2 = [1, 2, { a: 4 }];
assert.strictEqual(deepEqual(arr1, arr2), false);
});
it('should return false for different arrays', () => {
const arr1 = [1, 2, { a: 3 }];
const arr2 = [1, 2, { a: 4 }];
assert.strictEqual(deepEqual(arr1, arr2), false);
});

it('should return true for equal primitives', () => {
assert.strictEqual(deepEqual(1, 1), true);
assert.strictEqual(deepEqual('hello', 'hello'), true);
assert.strictEqual(deepEqual(null, null), true);
assert.strictEqual(deepEqual(undefined, undefined), true);
});
it('should return true for equal primitives', () => {
assert.strictEqual(deepEqual(1, 1), true);
assert.strictEqual(deepEqual('hello', 'hello'), true);
assert.strictEqual(deepEqual(null, null), true);
assert.strictEqual(deepEqual(undefined, undefined), true);
});

it('should return false for different primitives', () => {
assert.strictEqual(deepEqual(1, 2), false);
assert.strictEqual(deepEqual('hello', 'world'), false);
assert.strictEqual(deepEqual(null, undefined), false);
});
});
it('should return false for different primitives', () => {
assert.strictEqual(deepEqual(1, 2), false);
assert.strictEqual(deepEqual('hello', 'world'), false);
assert.strictEqual(deepEqual(null, undefined), false);
});
});
src/utils.ts
@@ -22,9 +22,9 @@ export class ExportHelper {
return resolve(ExportHelper.schemaDir, 'hash.txt');
}

static utcTS(isoTimestamp: string = new Date().toISOString()) {
return isoTimestamp.replace("T", " ").replace(/\.\d*Z/, "");
}
static utcTS(isoTimestamp: string = new Date().toISOString()) {
return isoTimestamp.replace('T', ' ').replace(/\.\d*Z/, '');
}
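Note: a worked example of what utcTS produces (hypothetical input, not from the diff):

ExportHelper.utcTS('2024-01-05T12:34:56.789Z'); // -> '2024-01-05 12:34:56'
// i.e. the ISO 'T' separator becomes a space and the fractional seconds plus trailing 'Z' are dropped.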
static async updateExportMeta(currentHash = '') {
const hasher = createHash('sha256');
@@ -41,12 +41,12 @@ export class ExportHelper {
if (hash === currentHash) return false;

const ts = ExportHelper.utcTS();
const txt = hash + '@' + ts

const txt = hash + '@' + ts;

await writeFile(this.hashFile, txt);
return {
hash,
ts
ts,
};
}

@@ -67,7 +67,7 @@ export class ExportHelper {
if (hash && ts && new Date(ts).toString() !== 'Invalid Date') {
return {
hash,
ts
ts,
};
}
}
@@ -85,7 +85,7 @@ export function deepEqual(obj1: any, obj2: any): boolean {

const keys1 = Object.keys(obj1);
const keys2 = Object.keys(obj2);

if (keys1.length !== keys2.length) return false;

for (let key of keys1) {
@@ -100,7 +100,7 @@ export function getDiff(newObj: Record<any, any>, oldObj: any) {
if (!oldObj) return newObj;

const result: Record<any, any> = {};
let isDifferent = false
let isDifferent = false;
Object.keys(newObj).forEach(key => {
if (!deepEqual(newObj[key], oldObj[key])) {
result[key] = newObj[key];
@@ -114,16 +114,18 @@ export function sortObject<T extends Record<string, any>>(obj: T): T;
export function sortObject<T>(obj: T[]): T[];
export function sortObject<T extends Record<string, any> | T[]>(obj: T): T {
if (typeof obj !== 'object' || obj === null) {
return obj;
return obj;
}

if (Array.isArray(obj)) {
return obj.map(sortObject) as unknown as T;
return obj.map(sortObject) as unknown as T;
}

const sortedObj: Record<string, any> = {};
Object.keys(obj).sort().forEach(key => {
Object.keys(obj)
.sort()
.forEach(key => {
sortedObj[key] = sortObject((obj as Record<string, any>)[key]);
});
});
return sortedObj as T;
}
}