mirror of https://github.com/xfarrow/blink, synced 2025-06-27 09:03:02 +02:00
Change endpoint from persons to people
This commit is contained in:
backend/apis/nodejs/node_modules/knex/lib/migrations/common/MigrationsLoader.js (36 lines; generated, vendored, Normal file)
@@ -0,0 +1,36 @@
const path = require('path');
const DEFAULT_LOAD_EXTENSIONS = Object.freeze([
  '.co',
  '.coffee',
  '.eg',
  '.iced',
  '.js',
  '.cjs',
  '.litcoffee',
  '.ls',
  '.ts',
]);

class AbstractMigrationsLoader {
  constructor(migrationDirectories, sortDirsSeparately, loadExtensions) {
    this.sortDirsSeparately = sortDirsSeparately;

    if (!Array.isArray(migrationDirectories)) {
      migrationDirectories = [migrationDirectories];
    }
    this.migrationsPaths = migrationDirectories;
    this.loadExtensions = loadExtensions || DEFAULT_LOAD_EXTENSIONS;
  }

  getFile(migrationsInfo) {
    const absoluteDir = path.resolve(process.cwd(), migrationsInfo.directory);
    const _path = path.join(absoluteDir, migrationsInfo.file);
    const importFile = require('../util/import-file'); // late import
    return importFile(_path);
  }
}

module.exports = {
  DEFAULT_LOAD_EXTENSIONS,
  AbstractMigrationsLoader,
};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/MigrationGenerator.js (84 lines; generated, vendored, Normal file)
@@ -0,0 +1,84 @@
const path = require('path');
const { writeJsFileUsingTemplate } = require('../util/template');
const { getMergedConfig } = require('./migrator-configuration-merger');
const { ensureDirectoryExists } = require('../util/fs');
const { yyyymmddhhmmss } = require('../util/timestamp');

class MigrationGenerator {
  constructor(migrationConfig, logger) {
    this.config = getMergedConfig(migrationConfig, undefined, logger);
  }

  // Creates a new migration, with a given name.
  async make(name, config, logger) {
    this.config = getMergedConfig(config, this.config, logger);
    if (!name) {
      return Promise.reject(
        new Error('A name must be specified for the generated migration')
      );
    }
    await this._ensureFolder();
    const createdMigrationFilePath = await this._writeNewMigration(name);
    return createdMigrationFilePath;
  }

  // Ensures a folder for the migrations exists, dependent on the migration
  // config settings.
  _ensureFolder() {
    const dirs = this._absoluteConfigDirs();

    const promises = dirs.map(ensureDirectoryExists);

    return Promise.all(promises);
  }

  _getStubPath() {
    return (
      this.config.stub ||
      path.join(__dirname, 'stub', this.config.extension + '.stub')
    );
  }

  _getNewMigrationName(name) {
    if (name[0] === '-') name = name.slice(1);
    return (
      yyyymmddhhmmss() + '_' + name + '.' + this.config.extension.split('-')[0]
    );
  }

  _getNewMigrationPath(name) {
    const fileName = this._getNewMigrationName(name);
    const dirs = this._absoluteConfigDirs();
    const dir = dirs.slice(-1)[0]; // Get last specified directory
    return path.join(dir, fileName);
  }

  // Write a new migration to disk, using the config and generated filename,
  // passing any `variables` given in the config to the template.
  async _writeNewMigration(name) {
    const migrationPath = this._getNewMigrationPath(name);
    await writeJsFileUsingTemplate(
      migrationPath,
      this._getStubPath(),
      { variable: 'd' },
      this.config.variables || {}
    );
    return migrationPath;
  }

  _absoluteConfigDirs() {
    const directories = Array.isArray(this.config.directory)
      ? this.config.directory
      : [this.config.directory];
    return directories.map((directory) => {
      if (!directory) {
        console.warn(
          'Failed to resolve config file, knex cannot determine where to generate migrations'
        );
      }
      return path.resolve(process.cwd(), directory);
    });
  }
}

module.exports = MigrationGenerator;
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/Migrator.js (599 lines; generated, vendored, Normal file)
@@ -0,0 +1,599 @@
// Migrator
// -------
const differenceWith = require('lodash/differenceWith');
const get = require('lodash/get');
const isEmpty = require('lodash/isEmpty');
const max = require('lodash/max');
const {
  getLockTableName,
  getTable,
  getTableName,
} = require('./table-resolver');
const { getSchemaBuilder } = require('./table-creator');
const migrationListResolver = require('./migration-list-resolver');
const MigrationGenerator = require('./MigrationGenerator');
const { getMergedConfig } = require('./migrator-configuration-merger');
const { isBoolean, isFunction } = require('../../util/is');

class LockError extends Error {
  constructor(msg) {
    super(msg);
    this.name = 'MigrationLocked';
  }
}

// The new migration we're performing, typically called from the `knex.migrate`
// interface on the main `knex` object. Passes the `knex` instance performing
// the migration.
class Migrator {
  constructor(knex) {
    // Clone knex instance and remove post-processing that is unnecessary for internal queries from a cloned config
    if (isFunction(knex)) {
      if (!knex.isTransaction) {
        this.knex = knex.withUserParams({
          ...knex.userParams,
        });
      } else {
        this.knex = knex;
      }
    } else {
      this.knex = Object.assign({}, knex);
      this.knex.userParams = this.knex.userParams || {};
    }

    this.config = getMergedConfig(
      this.knex.client.config.migrations,
      undefined,
      this.knex.client.logger
    );
    this.generator = new MigrationGenerator(
      this.knex.client.config.migrations,
      this.knex.client.logger
    );
    this._activeMigration = {
      fileName: null,
    };
  }

  // Migrates to the latest configuration.
  async latest(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config, this.knex.client.logger);

    const allAndCompleted = await migrationListResolver.listAllAndCompleted(
      this.config,
      this.knex
    );

    if (!this.config.disableMigrationsListValidation) {
      validateMigrationList(this.config.migrationSource, allAndCompleted);
    }

    const [all, completed] = allAndCompleted;

    const migrations = getNewMigrations(
      this.config.migrationSource,
      all,
      completed
    );

    const transactionForAll =
      !this.config.disableTransactions &&
      !(
        await Promise.all(
          migrations.map(async (migration) => {
            const migrationContents =
              await this.config.migrationSource.getMigration(migration);
            return !this._useTransaction(migrationContents);
          })
        )
      ).some((isTransactionUsed) => isTransactionUsed);

    if (transactionForAll) {
      return this.knex.transaction((trx) => {
        return this._runBatch(migrations, 'up', trx);
      });
    } else {
      return this._runBatch(migrations, 'up');
    }
  }

  // Runs the next migration that has not yet been run
  async up(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config, this.knex.client.logger);

    const allAndCompleted = await migrationListResolver.listAllAndCompleted(
      this.config,
      this.knex
    );

    if (!this.config.disableMigrationsListValidation) {
      validateMigrationList(this.config.migrationSource, allAndCompleted);
    }
    const [all, completed] = allAndCompleted;

    const newMigrations = getNewMigrations(
      this.config.migrationSource,
      all,
      completed
    );

    let migrationToRun;
    const name = this.config.name;
    if (name) {
      if (!completed.includes(name)) {
        migrationToRun = newMigrations.find((migration) => {
          return (
            this.config.migrationSource.getMigrationName(migration) === name
          );
        });
        if (!migrationToRun) {
          throw new Error(`Migration "${name}" not found.`);
        }
      }
    } else {
      migrationToRun = newMigrations[0];
    }

    const useTransaction =
      !migrationToRun ||
      this._useTransaction(
        await this.config.migrationSource.getMigration(migrationToRun)
      );

    const migrationsToRun = [];
    if (migrationToRun) {
      migrationsToRun.push(migrationToRun);
    }

    const transactionForAll =
      !this.config.disableTransactions && (!migrationToRun || useTransaction);

    if (transactionForAll) {
      return await this.knex.transaction((trx) => {
        return this._runBatch(migrationsToRun, 'up', trx);
      });
    } else {
      return await this._runBatch(migrationsToRun, 'up');
    }
  }

  // Rollback the last "batch", or all, of migrations that were run.
  rollback(config, all = false) {
    this._disableProcessing();
    return new Promise((resolve, reject) => {
      try {
        this.config = getMergedConfig(
          config,
          this.config,
          this.knex.client.logger
        );
      } catch (e) {
        reject(e);
      }
      migrationListResolver
        .listAllAndCompleted(this.config, this.knex)
        .then((value) => {
          if (!this.config.disableMigrationsListValidation) {
            validateMigrationList(this.config.migrationSource, value);
          }
          return value;
        })
        .then((val) => {
          const [allMigrations, completedMigrations] = val;

          return all
            ? allMigrations
                .filter((migration) => {
                  return completedMigrations
                    .map((migration) => migration.name)
                    .includes(
                      this.config.migrationSource.getMigrationName(migration)
                    );
                })
                .reverse()
            : this._getLastBatch(val);
        })
        .then((migrations) => {
          return this._runBatch(migrations, 'down');
        })
        .then(resolve, reject);
    });
  }

  down(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config, this.knex.client.logger);

    return migrationListResolver
      .listAllAndCompleted(this.config, this.knex)
      .then((value) => {
        if (!this.config.disableMigrationsListValidation) {
          validateMigrationList(this.config.migrationSource, value);
        }
        return value;
      })
      .then(([all, completed]) => {
        const completedMigrations = all.filter((migration) => {
          return completed
            .map((migration) => migration.name)
            .includes(this.config.migrationSource.getMigrationName(migration));
        });

        let migrationToRun;
        const name = this.config.name;
        if (name) {
          migrationToRun = completedMigrations.find((migration) => {
            return (
              this.config.migrationSource.getMigrationName(migration) === name
            );
          });
          if (!migrationToRun) {
            throw new Error(`Migration "${name}" was not run.`);
          }
        } else {
          migrationToRun = completedMigrations[completedMigrations.length - 1];
        }

        const migrationsToRun = [];
        if (migrationToRun) {
          migrationsToRun.push(migrationToRun);
        }

        return this._runBatch(migrationsToRun, 'down');
      });
  }

  status(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config, this.knex.client.logger);

    return Promise.all([
      getTable(this.knex, this.config.tableName, this.config.schemaName).select(
        '*'
      ),
      migrationListResolver.listAll(this.config.migrationSource),
    ]).then(([db, code]) => db.length - code.length);
  }

  // Retrieves and returns the current migration version we're on, as a promise.
  // If no migrations have been run yet, return "none".
  currentVersion(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config, this.knex.client.logger);

    return migrationListResolver
      .listCompleted(this.config.tableName, this.config.schemaName, this.knex)
      .then((completed) => {
        const val = max(completed.map((value) => value.name.split('_')[0]));
        return val === undefined ? 'none' : val;
      });
  }

  // Lists all migrations.
  async list(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config, this.knex.client.logger);

    const [all, completed] = await migrationListResolver.listAllAndCompleted(
      this.config,
      this.knex
    );

    if (!this.config.disableMigrationsListValidation) {
      validateMigrationList(this.config.migrationSource, [all, completed]);
    }

    const newMigrations = getNewMigrations(
      this.config.migrationSource,
      all,
      completed
    );
    return [completed, newMigrations];
  }

  async forceFreeMigrationsLock(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config, this.knex.client.logger);
    const { schemaName, tableName } = this.config;
    const lockTableName = getLockTableName(tableName);
    const { knex } = this;
    const getLockTable = () => getTable(knex, lockTableName, schemaName);
    const tableExists = await getSchemaBuilder(knex, schemaName).hasTable(
      lockTableName
    );
    if (tableExists) {
      await getLockTable().del();
      await getLockTable().insert({
        is_locked: 0,
      });
    }
  }

  // Creates a new migration, with a given name.
  make(name, config) {
    return this.generator.make(name, config, this.knex.client.logger);
  }

  _disableProcessing() {
    if (this.knex.disableProcessing) {
      this.knex.disableProcessing();
    }
  }

  _lockMigrations(trx) {
    const tableName = getLockTableName(this.config.tableName);
    return getTable(this.knex, tableName, this.config.schemaName)
      .transacting(trx)
      .where('is_locked', '=', 0)
      .update({ is_locked: 1 })
      .then((rowCount) => {
        if (rowCount !== 1) {
          throw new Error('Migration table is already locked');
        }
      });
  }

  _getLock(trx) {
    const transact = trx ? (fn) => fn(trx) : (fn) => this.knex.transaction(fn);
    return transact((trx) => {
      return this._lockMigrations(trx);
    }).catch((err) => {
      throw new LockError(err.message);
    });
  }

  _freeLock(trx = this.knex) {
    const tableName = getLockTableName(this.config.tableName);
    return getTable(trx, tableName, this.config.schemaName).update({
      is_locked: 0,
    });
  }

  // Run a batch of current migrations, in sequence.
  async _runBatch(migrations, direction, trx) {
    const canGetLockInTransaction =
      this.knex.client.driverName !== 'cockroachdb';
    try {
      await this._getLock(canGetLockInTransaction ? trx : undefined);
      // When there is a wrapping transaction, some migrations
      // could have been done while waiting for the lock:
      const completed = trx
        ? await migrationListResolver.listCompleted(
            this.config.tableName,
            this.config.schemaName,
            trx
          )
        : [];

      migrations = getNewMigrations(
        this.config.migrationSource,
        migrations,
        completed
      );

      await Promise.all(
        migrations.map(this._validateMigrationStructure.bind(this))
      );

      let batchNo = await this._latestBatchNumber(trx);
      if (direction === 'up') batchNo++;
      const res = await this._waterfallBatch(
        batchNo,
        migrations,
        direction,
        trx
      );
      await this._freeLock(canGetLockInTransaction ? trx : undefined);
      return res;
    } catch (error) {
      let cleanupReady = Promise.resolve();

      if (error instanceof LockError) {
        // If locking error do not free the lock.
        this.knex.client.logger.warn(
          `Can't take lock to run migrations: ${error.message}`
        );
        this.knex.client.logger.warn(
          'If you are sure migrations are not running you can release the ' +
            "lock manually by running 'knex migrate:unlock'"
        );
      } else {
        if (this._activeMigration.fileName) {
          this.knex.client.logger.warn(
            `migration file "${this._activeMigration.fileName}" failed`
          );
        }
        this.knex.client.logger.warn(
          `migration failed with error: ${error.message}`
        );
        // If the error was not due to a locking issue, then remove the lock.
        cleanupReady = this._freeLock(
          canGetLockInTransaction ? trx : undefined
        );
      }

      try {
        await cleanupReady;
        // eslint-disable-next-line no-empty
      } catch (e) {}
      throw error;
    }
  }

  // Validates some migrations by requiring and checking for an `up` and `down`
  // function.
  async _validateMigrationStructure(migration) {
    const migrationName =
      this.config.migrationSource.getMigrationName(migration);
    // maybe promise
    const migrationContent = await this.config.migrationSource.getMigration(
      migration
    );
    if (
      typeof migrationContent.up !== 'function' ||
      typeof migrationContent.down !== 'function'
    ) {
      throw new Error(
        `Invalid migration: ${migrationName} must have both an up and down function`
      );
    }

    return migration;
  }

  // Get the last batch of migrations, by name, ordered by insert id in reverse
  // order.
  async _getLastBatch([allMigrations]) {
    const { tableName, schemaName } = this.config;
    const migrationNames = await getTable(this.knex, tableName, schemaName)
      .where('batch', function (qb) {
        qb.max('batch').from(getTableName(tableName, schemaName));
      })
      .orderBy('id', 'desc');

    const lastBatchMigrations = migrationNames.map((migration) => {
      return allMigrations.find((entry) => {
        return (
          this.config.migrationSource.getMigrationName(entry) === migration.name
        );
      });
    });
    return Promise.all(lastBatchMigrations);
  }

  // Returns the latest batch number.
  _latestBatchNumber(trx = this.knex) {
    return trx
      .from(getTableName(this.config.tableName, this.config.schemaName))
      .max('batch as max_batch')
      .then((obj) => obj[0].max_batch || 0);
  }

  // If transaction config for a single migration is defined, use that.
  // Otherwise, rely on the common config. This allows enabling/disabling
  // transaction for a single migration at will, regardless of the common
  // config.
  _useTransaction(migrationContent, allTransactionsDisabled) {
    const singleTransactionValue = get(migrationContent, 'config.transaction');

    return isBoolean(singleTransactionValue)
      ? singleTransactionValue
      : !allTransactionsDisabled;
  }

  // Runs a batch of `migrations` in a specified `direction`, saving the
  // appropriate database information as the migrations are run.
  _waterfallBatch(batchNo, migrations, direction, trx) {
    const trxOrKnex = trx || this.knex;
    const { tableName, schemaName, disableTransactions } = this.config;
    let current = Promise.resolve();
    const log = [];
    migrations.forEach((migration) => {
      const name = this.config.migrationSource.getMigrationName(migration);
      this._activeMigration.fileName = name;
      const migrationContent =
        this.config.migrationSource.getMigration(migration);

      // We're going to run each of the migrations in the current "up".
      current = current
        .then(async () => await migrationContent) // maybe promise
        .then((migrationContent) => {
          this._activeMigration.fileName = name;
          if (
            !trx &&
            this._useTransaction(migrationContent, disableTransactions)
          ) {
            this.knex.enableProcessing();
            return this._transaction(
              this.knex,
              migrationContent,
              direction,
              name
            );
          }

          trxOrKnex.enableProcessing();
          return checkPromise(
            this.knex.client.logger,
            migrationContent[direction](trxOrKnex),
            name
          );
        })
        .then(() => {
          trxOrKnex.disableProcessing();
          this.knex.disableProcessing();
          log.push(name);
          if (direction === 'up') {
            return trxOrKnex.into(getTableName(tableName, schemaName)).insert({
              name,
              batch: batchNo,
              migration_time: new Date(),
            });
          }
          if (direction === 'down') {
            return trxOrKnex
              .from(getTableName(tableName, schemaName))
              .where({ name })
              .del();
          }
        });
    });

    return current.then(() => [batchNo, log]);
  }

  _transaction(knex, migrationContent, direction, name) {
    return knex.transaction((trx) => {
      return checkPromise(
        knex.client.logger,
        migrationContent[direction](trx),
        name,
        () => {
          trx.commit();
        }
      );
    });
  }
}

// Validates that migrations are present in the appropriate directories.
function validateMigrationList(migrationSource, migrations) {
  const [all, completed] = migrations;
  const diff = getMissingMigrations(migrationSource, completed, all);
  if (!isEmpty(diff)) {
    const names = diff.map((d) => d.name);
    throw new Error(
      `The migration directory is corrupt, the following files are missing: ${names.join(
        ', '
      )}`
    );
  }
}

function getMissingMigrations(migrationSource, completed, all) {
  return differenceWith(completed, all, (c, a) => {
    return c.name === migrationSource.getMigrationName(a);
  });
}

function getNewMigrations(migrationSource, all, completed) {
  return differenceWith(all, completed, (a, c) => {
    return c.name === migrationSource.getMigrationName(a);
  });
}

function checkPromise(logger, migrationPromise, name, commitFn) {
  if (!migrationPromise || typeof migrationPromise.then !== 'function') {
    logger.warn(`migration ${name} did not return a promise`);
    if (commitFn) {
      commitFn();
    }
  }
  return migrationPromise;
}

module.exports = {
  Migrator,
};
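For context, the Migrator above backs knex's public knex.migrate interface. A minimal usage sketch (assumes an already-configured knex instance and a ./migrations directory; this snippet is illustrative and not part of the commit):

const knex = require('knex')({
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
});

async function main() {
  // Run all pending migrations; resolves to [batchNo, [fileNames]],
  // mirroring _waterfallBatch's return value above.
  const [batchNo, log] = await knex.migrate.latest();
  console.log(`batch ${batchNo} ran ${log.length} migrations`);

  // Roll back the most recent batch; per rollback(config, all = false),
  // passing true as the second argument rolls back everything.
  await knex.migrate.rollback();
}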
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/migrate-stub.js (17 lines; generated, vendored, Normal file)
@@ -0,0 +1,17 @@
// Stub Migrate:
// Used for now in browser builds, where filesystem access isn't
// available.
const StubMigrate = (module.exports = function () {});

const noSuchMethod = async function () {
  throw new Error('Migrations are not supported');
};

StubMigrate.prototype = {
  make: noSuchMethod,
  latest: noSuchMethod,
  rollback: noSuchMethod,
  currentVersion: noSuchMethod,
  up: noSuchMethod,
  down: noSuchMethod,
};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/migration-list-resolver.js (33 lines; generated, vendored, Normal file)
@@ -0,0 +1,33 @@
const { getTableName } = require('./table-resolver');
const { ensureTable } = require('./table-creator');

// Lists all available migration versions, as a sorted array.
function listAll(migrationSource, loadExtensions) {
  return migrationSource.getMigrations(loadExtensions);
}

// Lists all migrations that have been completed for the current db, as an
// array.
async function listCompleted(tableName, schemaName, trxOrKnex) {
  await ensureTable(tableName, schemaName, trxOrKnex);

  return await trxOrKnex
    .from(getTableName(tableName, schemaName))
    .orderBy('id')
    .select('name');
}

// Gets the migration list from the migration directory specified in config, as well as
// the list of completed migrations to check what should be run.
function listAllAndCompleted(config, trxOrKnex) {
  return Promise.all([
    listAll(config.migrationSource, config.loadExtensions),
    listCompleted(config.tableName, config.schemaName, trxOrKnex),
  ]);
}

module.exports = {
  listAll,
  listAllAndCompleted,
  listCompleted,
};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/migrator-configuration-merger.js (58 lines; generated, vendored, Normal file)
@@ -0,0 +1,58 @@
const { FsMigrations } = require('./sources/fs-migrations');
const Logger = require('../../logger');
const { DEFAULT_LOAD_EXTENSIONS } = require('../common/MigrationsLoader');
const defaultLogger = new Logger();

const CONFIG_DEFAULT = Object.freeze({
  extension: 'js',
  loadExtensions: DEFAULT_LOAD_EXTENSIONS,
  tableName: 'knex_migrations',
  schemaName: null,
  directory: './migrations',
  disableTransactions: false,
  disableMigrationsListValidation: false,
  sortDirsSeparately: false,
});

function getMergedConfig(config, currentConfig, logger = defaultLogger) {
  // config is the user specified config, mergedConfig has defaults and current config
  // applied to it.
  const mergedConfig = Object.assign(
    {},
    CONFIG_DEFAULT,
    currentConfig || {},
    config
  );

  if (
    config &&
    // If user specifies any FS related config,
    // clear specified migrationSource to avoid ambiguity
    (config.directory ||
      config.sortDirsSeparately !== undefined ||
      config.loadExtensions)
  ) {
    if (config.migrationSource) {
      logger.warn(
        'FS-related option specified for migration configuration. This resets migrationSource to default FsMigrations'
      );
    }
    mergedConfig.migrationSource = null;
  }

  // If the user has not specified any configs, we need to
  // default to fs migrations to maintain compatibility
  if (!mergedConfig.migrationSource) {
    mergedConfig.migrationSource = new FsMigrations(
      mergedConfig.directory,
      mergedConfig.sortDirsSeparately,
      mergedConfig.loadExtensions
    );
  }

  return mergedConfig;
}

module.exports = {
  getMergedConfig,
};
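For reference, CONFIG_DEFAULT above means that an explicit knexfile migrations section with the same values would look like the sketch below; any of these keys can simply be omitted to get the default (client/connection values are placeholders):

module.exports = {
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  migrations: {
    extension: 'js',              // stub picked by migrate:make
    tableName: 'knex_migrations', // bookkeeping table
    schemaName: null,
    directory: './migrations',
    disableTransactions: false,
    disableMigrationsListValidation: false,
    sortDirsSeparately: false,
  },
};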
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/sources/fs-migrations.js (74 lines; generated, vendored, Normal file)
@@ -0,0 +1,74 @@
const path = require('path');
const sortBy = require('lodash/sortBy');

const { readdir } = require('../../util/fs');
const { AbstractMigrationsLoader } = require('../../common/MigrationsLoader');

class FsMigrations extends AbstractMigrationsLoader {
  /**
   * Gets the migration names
   * @returns Promise<string[]>
   */
  getMigrations(loadExtensions) {
    // Get a list of files in all specified migration directories
    const readMigrationsPromises = this.migrationsPaths.map((configDir) => {
      const absoluteDir = path.resolve(process.cwd(), configDir);
      return readdir(absoluteDir).then((files) => ({
        files,
        configDir,
        absoluteDir,
      }));
    });

    return Promise.all(readMigrationsPromises).then((allMigrations) => {
      const migrations = allMigrations.reduce((acc, migrationDirectory) => {
        // When true, files inside the folder should be sorted
        if (this.sortDirsSeparately) {
          migrationDirectory.files = migrationDirectory.files.sort();
        }

        migrationDirectory.files.forEach((file) =>
          acc.push({ file, directory: migrationDirectory.configDir })
        );

        return acc;
      }, []);

      // If true we have already sorted the migrations inside the folders
      // return the migrations fully qualified
      if (this.sortDirsSeparately) {
        return filterMigrations(
          this,
          migrations,
          loadExtensions || this.loadExtensions
        );
      }

      return filterMigrations(
        this,
        sortBy(migrations, 'file'),
        loadExtensions || this.loadExtensions
      );
    });
  }

  getMigrationName(migration) {
    return migration.file;
  }

  getMigration(migrationInfo) {
    return this.getFile(migrationInfo);
  }
}

function filterMigrations(migrationSource, migrations, loadExtensions) {
  return migrations.filter((migration) => {
    const migrationName = migrationSource.getMigrationName(migration);
    const extension = path.extname(migrationName);
    return loadExtensions.includes(extension);
  });
}

module.exports = {
  FsMigrations,
};
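FsMigrations is the default implementation of the migration-source interface the Migrator consumes (getMigrations, getMigrationName, getMigration). A custom source can be supplied via migrations.migrationSource in the knex config; a minimal in-memory sketch follows (the migration it serves is hypothetical, named here only for illustration):

class InMemoryMigrationSource {
  // Return an ordered list of opaque migration handles.
  async getMigrations() {
    return ['create_people_table'];
  }

  // The name recorded in the knex_migrations bookkeeping table.
  getMigrationName(migration) {
    return migration;
  }

  // Resolve a handle to an object exposing up/down functions.
  async getMigration(migration) {
    return {
      async up(knex) {
        await knex.schema.createTable('people', (t) => {
          t.increments();
          t.string('name');
        });
      },
      async down(knex) {
        await knex.schema.dropTable('people');
      },
    };
  }
}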
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/cjs.stub (15 lines; generated, vendored, Normal file)
@@ -0,0 +1,15 @@

exports.up = function(knex) {
  <% if (d.tableName) { %>
  return knex.schema.createTable("<%= d.tableName %>", function(t) {
    t.increments();
    t.timestamp();
  });
  <% } %>
};

exports.down = function(knex) {
  <% if (d.tableName) { %>
  return knex.schema.dropTable("<%= d.tableName %>");
  <% } %>
};
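The .stub files in this commit are lodash-style templates; MigrationGenerator._writeNewMigration renders them with { variable: 'd' }, so <%= d.tableName %> reads from the variables object in the migration config. A sketch of how a table name could be fed in (the values are illustrative; with no variables, d.tableName is undefined and the generated up/down bodies are empty):

// knexfile.js (excerpt)
migrations: {
  extension: 'cjs',
  variables: { tableName: 'people' },
},
// migrate:make would then emit createTable("people", ...) bodies
// from the stub above.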
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/coffee.stub (13 lines; generated, vendored, Normal file)
@@ -0,0 +1,13 @@

exports.up = (knex) ->
  <% if (d.tableName) { %>
  knex.schema.createTable "<%= d.tableName %>", (t) ->
    t.increments()
    t.timestamp()
  <% } %>


exports.down = (knex) ->
  <% if (d.tableName) { %>
  knex.schema.dropTable "<%= d.tableName %>"
  <% } %>
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/eg.stub (14 lines; generated, vendored, Normal file)
@@ -0,0 +1,14 @@
provide: up, down

up = (knex) ->
  <% if (d.tableName) { %>
  knex.schema.createTable "<%= d.tableName %>": t ->
    t.increments()
    t.timestamp()
  <% } %>


down = (knex) ->
  <% if (d.tableName) { %>
  knex.schema.dropTable("<%= d.tableName %>")
  <% } %>
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/js-schema.stub (22 lines; generated, vendored, Normal file)
@@ -0,0 +1,22 @@
/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.up = function({schema}) {
  <% if (d.tableName) { %>
  return schema.createTable("<%= d.tableName %>", function(t) {
    t.increments();
    t.timestamp();
  });
  <% } %>
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.down = function({schema}) {
  <% if (d.tableName) { %>
  return schema.dropTable("<%= d.tableName %>");
  <% } %>
};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/js.stub (22 lines; generated, vendored, Normal file)
@@ -0,0 +1,22 @@
/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.up = function(knex) {
  <% if (d.tableName) { %>
  return knex.schema.createTable("<%= d.tableName %>", function(t) {
    t.increments();
    t.timestamp();
  });
  <% } %>
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.down = function(knex) {
  <% if (d.tableName) { %>
  return knex.schema.dropTable("<%= d.tableName %>");
  <% } %>
};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/knexfile-coffee.stub (34 lines; generated, vendored, Normal file)
@@ -0,0 +1,34 @@
# Update with your config settings.

module.exports =

  development:
    client: 'sqlite3'
    connection:
      filename: './dev.sqlite3'
    migrations:
      tableName: 'knex_migrations'

  staging:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'

  production:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/knexfile-eg.stub (43 lines; generated, vendored, Normal file)
@@ -0,0 +1,43 @@
;; Update with your config settings.

module.exports = {
  development = {
    client = 'sqlite3'
    connection = {
      filename = './dev.sqlite3'
    }
    migrations = {
      tableName = 'knex_migrations'
    }
  }
  staging = {
    client = 'postgresql'
    connection = {
      database = 'my_db'
      user = 'username'
      password = 'password'
    }
    pool = {
      min = 2
      max = 10
    }
    migrations = {
      tableName = 'knex_migrations'
    }
  }
  production = {
    client = 'postgresql'
    connection = {
      database = 'my_db'
      user = 'username'
      password = 'password'
    }
    pool = {
      min = 2
      max = 10
    }
    migrations = {
      tableName = 'knex_migrations'
    }
  }
}
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/knexfile-js.stub (47 lines; generated, vendored, Normal file)
@@ -0,0 +1,47 @@
// Update with your config settings.

/**
 * @type { Object.<string, import("knex").Knex.Config> }
 */
module.exports = {

  development: {
    client: 'sqlite3',
    connection: {
      filename: './dev.sqlite3'
    }
  },

  staging: {
    client: 'postgresql',
    connection: {
      database: 'my_db',
      user: 'username',
      password: 'password'
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: 'knex_migrations'
    }
  },

  production: {
    client: 'postgresql',
    connection: {
      database: 'my_db',
      user: 'username',
      password: 'password'
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: 'knex_migrations'
    }
  }

};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/knexfile-ls.stub (35 lines; generated, vendored, Normal file)
@@ -0,0 +1,35 @@
# Update with your config settings.

module.exports =

  development:
    client: 'sqlite3'
    connection:
      filename: './dev.sqlite3'
    migrations:
      tableName: 'knex_migrations'

  staging:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'

  production:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/knexfile-ts.stub (47 lines; generated, vendored, Normal file)
@@ -0,0 +1,47 @@
import type { Knex } from "knex";

// Update with your config settings.

const config: { [key: string]: Knex.Config } = {
  development: {
    client: "sqlite3",
    connection: {
      filename: "./dev.sqlite3"
    }
  },

  staging: {
    client: "postgresql",
    connection: {
      database: "my_db",
      user: "username",
      password: "password"
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: "knex_migrations"
    }
  },

  production: {
    client: "postgresql",
    connection: {
      database: "my_db",
      user: "username",
      password: "password"
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: "knex_migrations"
    }
  }

};

module.exports = config;
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/ls.stub (14 lines; generated, vendored, Normal file)
@@ -0,0 +1,14 @@

exports.up = (knex, Promise) ->
  <% if (d.tableName) { %>
  knex.schema.create-table "<%= d.tableName %>", (t) ->
    t.increments!
    t.timestamp!
  <% } %>


exports.down = (knex, Promise) ->
  <% if (d.tableName) { %>
  knex.schema.drop-table "<%= d.tableName %>"
  <% } %>
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/mjs.stub (23 lines; generated, vendored, Normal file)
@@ -0,0 +1,23 @@

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
export const up = async (knex) => {
  <% if (d.tableName) { %>
  await knex.schema.createTable("<%= d.tableName %>", function(t) {
    t.increments();
    t.timestamp();
  });
  <% } %>
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
export const down = async (knex) => {
  <% if (d.tableName) { %>
  await knex.schema.dropTable("<%= d.tableName %>");
  <% } %>
};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/ts-schema.stub (21 lines; generated, vendored, Normal file)
@@ -0,0 +1,21 @@
import { Knex } from "knex";

<% if (d.tableName) { %>
export async function up({schema}: Knex): Promise<Knex.SchemaBuilder> {
  return schema.createTable("<%= d.tableName %>", (t) => {
    t.increments();
    t.timestamps();
  });
}
<% } else { %>
export async function up({schema}: Knex): Promise<void> {
}
<% } %>
<% if (d.tableName) { %>
export async function down({schema}: Knex): Promise<Knex.SchemaBuilder> {
  return schema.dropTable("<%= d.tableName %>");
}
<% } else { %>
export async function down({schema}: Knex): Promise<void> {
}
<% } %>
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/stub/ts.stub (21 lines; generated, vendored, Normal file)
@@ -0,0 +1,21 @@
import type { Knex } from "knex";

<% if (d.tableName) { %>
export async function up(knex: Knex): Promise<Knex.SchemaBuilder> {
  return knex.schema.createTable("<%= d.tableName %>", (t) => {
    t.increments();
    t.timestamps();
  });
}
<% } else { %>
export async function up(knex: Knex): Promise<void> {
}
<% } %>
<% if (d.tableName) { %>
export async function down(knex: Knex): Promise<Knex.SchemaBuilder> {
  return knex.schema.dropTable("<%= d.tableName %>");
}
<% } else { %>
export async function down(knex: Knex): Promise<void> {
}
<% } %>
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/table-creator.js (77 lines; generated, vendored, Normal file)
@@ -0,0 +1,77 @@
const {
  getTable,
  getLockTableName,
  getLockTableNameWithSchema,
  getTableName,
} = require('./table-resolver');

function ensureTable(tableName, schemaName, trxOrKnex) {
  const lockTable = getLockTableName(tableName);
  return getSchemaBuilder(trxOrKnex, schemaName)
    .hasTable(tableName)
    .then((exists) => {
      return !exists && _createMigrationTable(tableName, schemaName, trxOrKnex);
    })
    .then(() => {
      return getSchemaBuilder(trxOrKnex, schemaName).hasTable(lockTable);
    })
    .then((exists) => {
      return (
        !exists && _createMigrationLockTable(lockTable, schemaName, trxOrKnex)
      );
    })
    .then(() => {
      return getTable(trxOrKnex, lockTable, schemaName).select('*');
    })
    .then((data) => {
      return (
        !data.length && _insertLockRowIfNeeded(tableName, schemaName, trxOrKnex)
      );
    });
}

function _createMigrationTable(tableName, schemaName, trxOrKnex) {
  return getSchemaBuilder(trxOrKnex, schemaName).createTable(
    getTableName(tableName),
    function (t) {
      t.increments();
      t.string('name');
      t.integer('batch');
      t.timestamp('migration_time');
    }
  );
}

function _createMigrationLockTable(tableName, schemaName, trxOrKnex) {
  return getSchemaBuilder(trxOrKnex, schemaName).createTable(
    tableName,
    function (t) {
      t.increments('index').primary();
      t.integer('is_locked');
    }
  );
}

function _insertLockRowIfNeeded(tableName, schemaName, trxOrKnex) {
  const lockTableWithSchema = getLockTableNameWithSchema(tableName, schemaName);
  return trxOrKnex
    .select('*')
    .from(lockTableWithSchema)
    .then((data) => {
      return !data.length
        ? trxOrKnex.from(lockTableWithSchema).insert({ is_locked: 0 })
        : null;
    });
}

// Get schema-aware schema builder for a given schema name
function getSchemaBuilder(trxOrKnex, schemaName) {
  return schemaName
    ? trxOrKnex.schema.withSchema(schemaName)
    : trxOrKnex.schema;
}

module.exports = {
  ensureTable,
  getSchemaBuilder,
};
backend/apis/nodejs/node_modules/knex/lib/migrations/migrate/table-resolver.js (27 lines; generated, vendored, Normal file)
@@ -0,0 +1,27 @@
// Get schema-aware table name
function getTableName(tableName, schemaName) {
  return schemaName ? `${schemaName}.${tableName}` : tableName;
}

// Get schema-aware query builder for a given table and schema name
function getTable(trxOrKnex, tableName, schemaName) {
  return schemaName
    ? trxOrKnex(tableName).withSchema(schemaName)
    : trxOrKnex(tableName);
}
function getLockTableName(tableName) {
  return tableName + '_lock';
}

function getLockTableNameWithSchema(tableName, schemaName) {
  return schemaName
    ? schemaName + '.' + getLockTableName(tableName)
    : getLockTableName(tableName);
}

module.exports = {
  getLockTableName,
  getLockTableNameWithSchema,
  getTable,
  getTableName,
};
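The resolvers above only build schema-qualified names and builders; worked examples (illustrative values):

getTableName('knex_migrations', 'public');               // 'public.knex_migrations'
getTableName('knex_migrations');                         // 'knex_migrations'
getLockTableName('knex_migrations');                     // 'knex_migrations_lock'
getLockTableNameWithSchema('knex_migrations', 'public'); // 'public.knex_migrations_lock'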
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/Seeder.js (137 lines; generated, vendored, Normal file)
@@ -0,0 +1,137 @@
// Seeder
// -------

const path = require('path');
const { ensureDirectoryExists } = require('../util/fs');
const { writeJsFileUsingTemplate } = require('../util/template');
const { yyyymmddhhmmss } = require('../util/timestamp');
const { getMergedConfig } = require('./seeder-configuration-merger');

// The new seeds we're performing, typically called from the `knex.seed`
// interface on the main `knex` object. Passes the `knex` instance performing
// the seeds.
class Seeder {
  constructor(knex) {
    this.knex = knex;
    this.config = this.resolveConfig(knex.client.config.seeds);
  }

  // Runs seed files for the given environment.
  async run(config) {
    this.config = this.resolveConfig(config);
    const files = await this.config.seedSource.getSeeds(this.config);
    return this._runSeeds(files);
  }

  // Creates a new seed file, with a given name.
  async make(name, config) {
    this.config = this.resolveConfig(config);
    if (!name)
      throw new Error('A name must be specified for the generated seed');
    await this._ensureFolder(config);
    const seedPath = await this._writeNewSeed(name);
    return seedPath;
  }

  // Ensures a folder for the seeds exists, dependent on the
  // seed config settings.
  _ensureFolder() {
    const dirs = this.config.seedSource._getConfigDirectories(
      this.config.logger
    );
    const promises = dirs.map(ensureDirectoryExists);
    return Promise.all(promises);
  }

  // Run seed files, in sequence.
  async _runSeeds(seeds) {
    for (const seed of seeds) {
      await this._validateSeedStructure(seed);
    }
    return this._waterfallBatch(seeds);
  }

  async _validateSeedStructure(filepath) {
    const seed = await this.config.seedSource.getSeed(filepath);
    if (typeof seed.seed !== 'function') {
      throw new Error(
        `Invalid seed file: ${filepath} must have a seed function`
      );
    }
    return filepath;
  }

  _getStubPath() {
    return (
      this.config.stub ||
      path.join(__dirname, 'stub', this.config.extension + '.stub')
    );
  }

  _getNewStubFileName(name) {
    if (name[0] === '-') name = name.slice(1);

    if (this.config.timestampFilenamePrefix === true) {
      name = `${yyyymmddhhmmss()}_${name}`;
    }

    return `${name}.${this.config.extension}`;
  }

  _getNewStubFilePath(name) {
    const fileName = this._getNewStubFileName(name);
    const dirs = this.config.seedSource._getConfigDirectories(
      this.config.logger
    );
    const dir = dirs.slice(-1)[0]; // Get last specified directory
    return path.join(dir, fileName);
  }

  // Write a new seed to disk, using the config and generated filename,
  // passing any `variables` given in the config to the template.
  async _writeNewSeed(name) {
    const seedPath = this._getNewStubFilePath(name);
    await writeJsFileUsingTemplate(
      seedPath,
      this._getStubPath(),
      { variable: 'd' },
      this.config.variables || {}
    );
    return seedPath;
  }

  async _listAll(config) {
    this.config = this.resolveConfig(config);
    return this.config.seedSource.getSeeds(this.config);
  }

  // Runs a batch of seed files.
  async _waterfallBatch(seeds) {
    const { knex } = this;
    const log = [];
    for (const seedPath of seeds) {
      const seed = await this.config.seedSource.getSeed(seedPath);
      try {
        await seed.seed(knex);
        log.push(seedPath);
      } catch (originalError) {
        const error = new Error(
          `Error while executing "${seedPath}" seed: ${originalError.message}`
        );
        error.original = originalError;
        error.stack =
          error.stack.split('\n').slice(0, 2).join('\n') +
          '\n' +
          originalError.stack;
        throw error;
      }
    }
    return [log];
  }

  resolveConfig(config) {
    return getMergedConfig(config, this.config, this.knex.client.logger);
  }
}

module.exports = Seeder;
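The Seeder backs knex's public knex.seed interface. A minimal usage sketch (assumes a configured knex instance and a ./seeds directory; illustrative, not part of the commit):

// Create ./seeds/people.js from the stub for the configured extension.
await knex.seed.make('people');

// Run every seed file in sorted order; resolves to [[filePaths]],
// mirroring _waterfallBatch's return value above.
const [log] = await knex.seed.run();

// Or run a single file by basename (see `specific` in fs-seeds below).
await knex.seed.run({ specific: 'people.js' });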
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/seed-stub.js (13 lines; generated, vendored, Normal file)
@@ -0,0 +1,13 @@
// Stub Seed:
// Used for now in browser builds, where filesystem access isn't
// available.
const StubSeed = (module.exports = function () {});

const noSuchMethod = async function () {
  throw new Error('Seeds are not supported');
};

StubSeed.prototype = {
  make: noSuchMethod,
  run: noSuchMethod,
};
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/seeder-configuration-merger.js (60 lines; generated, vendored, Normal file)
@@ -0,0 +1,60 @@
const { FsSeeds } = require('./sources/fs-seeds');
const Logger = require('../../logger');
const { DEFAULT_LOAD_EXTENSIONS } = require('../common/MigrationsLoader');
const defaultLogger = new Logger();

const CONFIG_DEFAULT = Object.freeze({
  extension: 'js',
  directory: './seeds',
  loadExtensions: DEFAULT_LOAD_EXTENSIONS,
  specific: null,
  timestampFilenamePrefix: false,
  recursive: false,
  sortDirsSeparately: false,
});

function getMergedConfig(config, currentConfig, logger = defaultLogger) {
  // config is the user specified config, mergedConfig has defaults and current config
  // applied to it.
  const mergedConfig = Object.assign(
    {},
    CONFIG_DEFAULT,
    currentConfig || {},
    config,
    {
      logger,
    }
  );

  if (
    config &&
    // If user specifies any FS related config,
    // clear specified seedSource to avoid ambiguity
    (config.directory ||
      config.sortDirsSeparately !== undefined ||
      config.loadExtensions)
  ) {
    if (config.seedSource) {
      logger.warn(
        'FS-related option specified for seed configuration. This resets seedSource to default FsMigrations'
      );
    }
    mergedConfig.seedSource = null;
  }

  // If the user has not specified any configs, we need to
  // default to fs seeds to maintain compatibility
  if (!mergedConfig.seedSource) {
    mergedConfig.seedSource = new FsSeeds(
      mergedConfig.directory,
      mergedConfig.sortDirsSeparately,
      mergedConfig.loadExtensions
    );
  }

  return mergedConfig;
}

module.exports = {
  getMergedConfig,
};
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/sources/fs-seeds.js (65 lines; generated, vendored, Normal file)
@@ -0,0 +1,65 @@
const path = require('path');
const flatten = require('lodash/flatten');
const includes = require('lodash/includes');
const { AbstractMigrationsLoader } = require('../../common/MigrationsLoader');
const { getFilepathsInFolder } = require('../../util/fs');

const filterByLoadExtensions = (extensions) => (value) => {
  const extension = path.extname(value);
  return includes(extensions, extension);
};

class FsSeeds extends AbstractMigrationsLoader {
  _getConfigDirectories(logger) {
    const directories = this.migrationsPaths;
    return directories.map((directory) => {
      if (!directory) {
        logger.warn(
          'Empty value passed as a directory for Seeder, this is not supported.'
        );
      }
      return path.resolve(process.cwd(), directory);
    });
  }

  async getSeeds(config) {
    const { loadExtensions, recursive, specific } = config;

    const seeds = flatten(
      await Promise.all(
        this._getConfigDirectories(config.logger).map((d) =>
          getFilepathsInFolder(d, recursive)
        )
      )
    );

    // if true, each dir is already sorted
    // (getFilepathsInFolderRecursively does this)
    // if false, we need to sort all the seeds
    let files = seeds.filter(filterByLoadExtensions(loadExtensions));
    if (!this.sortDirsSeparately) {
      files.sort();
    }

    if (specific) {
      files = files.filter((file) => path.basename(file) === specific);
      if (files.length === 0) {
        throw new Error(
          `Invalid argument provided: the specific seed "${specific}" does not exist.`
        );
      }
    }

    return files;
  }

  async getSeed(filepath) {
    const importFile = require('../../util/import-file'); // late import
    const seed = await importFile(filepath);
    return seed;
  }
}

module.exports = {
  FsSeeds,
};
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/stub/coffee.stub (9 lines; generated, vendored, Normal file)
@@ -0,0 +1,9 @@
exports.seed = (knex) ->
  knex('table_name').del()
  .then () ->
    # Inserts seed entries
    knex('table_name').insert([
      {id: 1, colName: 'rowValue'}
      {id: 2, colName: 'rowValue2'}
      {id: 3, colName: 'rowValue3'}
    ])
11
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/stub/eg.stub
generated
vendored
Normal file
@ -0,0 +1,11 @@
provide: seed
seed = (knex) ->
   ;; Deletes ALL existing entries
   knex(.table_name).del()
      .then(() ->
         ;; Inserts seed entries
         knex(.table_name).insert with [
            { id = 1, col-name = .row-value-1 }
            { id = 2, col-name = .row-value-2 }
            { id = 3, col-name = .row-value-3 }
         ]
13
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/stub/js.stub
generated
vendored
Normal file
@ -0,0 +1,13 @@
/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.seed = async function(knex) {
  // Deletes ALL existing entries
  await knex('table_name').del()
  await knex('table_name').insert([
    {id: 1, colName: 'rowValue1'},
    {id: 2, colName: 'rowValue2'},
    {id: 3, colName: 'rowValue3'}
  ]);
};
11
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/stub/ls.stub
generated
vendored
Normal file
@ -0,0 +1,11 @@
exports.seed = (knex) ->
  # Deletes ALL existing entries
  knex('table_name').del()
  .then(() ->
    # Inserts seed entries
    knex('table_name').insert([
      {id: 1, colName: 'rowValue1'},
      {id: 2, colName: 'rowValue2'},
      {id: 3, colName: 'rowValue3'}
    ])
  )
12
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/stub/mjs.stub
generated
vendored
Normal file
@ -0,0 +1,12 @@

export const seed = async (knex) => {
  // Deletes ALL existing entries
  await knex('table_name').del();

  // Inserts seed entries
  await knex('table_name').insert([
    {id: 1, colName: 'rowValue1'},
    {id: 2, colName: 'rowValue2'},
    {id: 3, colName: 'rowValue3'}
  ]);
};
13
backend/apis/nodejs/node_modules/knex/lib/migrations/seed/stub/ts.stub
generated
vendored
Normal file
@ -0,0 +1,13 @@
import { Knex } from "knex";

export async function seed(knex: Knex): Promise<void> {
    // Deletes ALL existing entries
    await knex("table_name").del();

    // Inserts seed entries
    await knex("table_name").insert([
        { id: 1, colName: "rowValue1" },
        { id: 2, colName: "rowValue2" },
        { id: 3, colName: "rowValue3" }
    ]);
};
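Editor's note: these stubs are the templates knex copies when a new seed file is generated; which one is used follows the configured seed `extension`. A minimal round-trip sketch through knex's programmatic API, assuming a local SQLite setup (the connection settings and seed name are illustrative only):

const knex = require('knex')({
  client: 'sqlite3', // assumed driver for illustration
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
  seeds: { directory: './seeds', extension: 'js' },
});

async function demo() {
  // Copies js.stub into ./seeds under the supplied name.
  const created = await knex.seed.make('add_default_rows');
  console.log('created', created);

  // Loads every seed file via FsSeeds and runs its exported seed() function.
  const [ran] = await knex.seed.run();
  console.log('ran', ran);

  await knex.destroy();
}

demo();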
86
backend/apis/nodejs/node_modules/knex/lib/migrations/util/fs.js
generated
vendored
Normal file
@ -0,0 +1,86 @@
const fs = require('fs');
const flatten = require('lodash/flatten');
const os = require('os');
const path = require('path');
const { promisify } = require('util');

// Promisify common fs functions.
const stat = promisify(fs.stat);
const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);
const readdir = promisify(fs.readdir);
const mkdir = promisify(fs.mkdir);

function existsSync(path) {
  try {
    fs.accessSync(path);
    return true;
  } catch (e) {
    return false;
  }
}

/**
 * Creates a temporary directory and returns its path.
 *
 * @returns {Promise<string>}
 */
function createTemp() {
  return promisify(fs.mkdtemp)(`${os.tmpdir()}${path.sep}`);
}

/**
 * Ensures the given path exists.
 * - If the path already exists, it does nothing.
 * - If the path doesn't exist, it is created (recursively).
 *
 * @param {string} dir
 * @returns {Promise}
 */
function ensureDirectoryExists(dir) {
  return stat(dir).catch(() => mkdir(dir, { recursive: true }));
}

/**
 * Reads a directory, sorting folders and files alphabetically.
 * Can browse recursively.
 *
 * @param {string} dir
 * The directory to analyse
 *
 * @param {boolean} recursive
 * Browse directory recursively
 *
 * @returns {Promise<string[]>}
 * All found files, resolved against the given dir
 */
async function getFilepathsInFolder(dir, recursive = false) {
  const pathsList = await readdir(dir);
  return flatten(
    await Promise.all(
      pathsList.sort().map(async (currentPath) => {
        const currentFile = path.resolve(dir, currentPath);
        const statFile = await stat(currentFile);
        if (statFile && statFile.isDirectory()) {
          if (recursive) {
            return await getFilepathsInFolder(currentFile, true);
          }
          return [];
        }
        return [currentFile];
      })
    )
  );
}

module.exports = {
  existsSync,
  stat,
  readdir,
  readFile,
  writeFile,
  createTemp,
  ensureDirectoryExists,
  getFilepathsInFolder,
};
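Editor's note: an illustrative sketch of the two helpers the seed and migration loaders lean on; './seeds' is an assumed directory.

const { ensureDirectoryExists, getFilepathsInFolder } = require('./fs');

async function listSeedFiles() {
  // No-op when the directory exists; otherwise it is created recursively.
  await ensureDirectoryExists('./seeds');

  // Walk the tree; each directory's entries are sorted before descending.
  const files = await getFilepathsInFolder('./seeds', true);
  return files; // absolute paths
}

listSeedFiles().then(console.log);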
12
backend/apis/nodejs/node_modules/knex/lib/migrations/util/import-file.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
const isModuleType = require('./is-module-type');

/**
 * Imports the file when it resolves as an ES module, otherwise requires it.
 * NOTE: require me late!
 * @param {string} filepath
 */
module.exports = async function importFile(filepath) {
  return (await isModuleType(filepath))
    ? import(require('url').pathToFileURL(filepath))
    : require(filepath);
};
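Editor's note: a sketch of what the helper returns (the path below is hypothetical). For an ES module you get the module namespace object; for CommonJS, the module.exports value.

const importFile = require('./import-file');

importFile('/srv/app/seeds/001_people.mjs').then((mod) => {
  // ESM: named exports live on the namespace object; CJS: on module.exports.
  console.log(typeof mod.seed); // 'function' for a standard seed file
});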
9
backend/apis/nodejs/node_modules/knex/lib/migrations/util/is-module-type.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
const getPackageType = require('get-package-type');

module.exports = async function isModuleType(filepath) {
  return (
    filepath.endsWith('.mjs') ||
    (!filepath.endsWith('.cjs') &&
      (await getPackageType(filepath)) === 'module')
  );
};
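Editor's note: the decision order above, spelled out (paths hypothetical). Explicit extensions win; a bare .js file defers to the nearest package.json "type" field, which is what get-package-type looks up.

const isModuleType = require('./is-module-type');

(async () => {
  console.log(await isModuleType('/app/seed.mjs')); // true  - extension wins
  console.log(await isModuleType('/app/seed.cjs')); // false - extension wins
  console.log(await isModuleType('/app/seed.js')); // depends on package.json "type"
})();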
52
backend/apis/nodejs/node_modules/knex/lib/migrations/util/template.js
generated
vendored
Normal file
@ -0,0 +1,52 @@
const template = require('lodash/template');

const { readFile, writeFile } = require('./fs');

/**
 * Light wrapper over lodash templates, making it safer to use with JavaScript
 * source code.
 *
 * In particular, it doesn't interfere with the use of template literals
 * (`${}`) in JavaScript.
 *
 * @param {string} content Template source
 * @param {_.TemplateOptions} options Template options
 */
const jsSourceTemplate = (content, options) =>
  template(content, {
    interpolate: /<%=([\s\S]+?)%>/g,
    ...options,
  });

/**
 * Compiles the contents of the specified (JavaScript) file as a lodash
 * template.
 *
 * @param {string} filePath Path of the file to be used as a template
 * @param {_.TemplateOptions} options Lodash template options
 */
const jsFileTemplate = async (filePath, options) => {
  const contentBuffer = await readFile(filePath);
  return jsSourceTemplate(contentBuffer.toString(), options);
};

/**
 * Writes a JavaScript file using another file as a (lodash) template.
 *
 * @param {string} targetFilePath
 * @param {string} sourceFilePath
 * @param {_.TemplateOptions} options Options passed to lodash templates
 */
const writeJsFileUsingTemplate = async (
  targetFilePath,
  sourceFilePath,
  options,
  variables
) =>
  writeFile(
    targetFilePath,
    (await jsFileTemplate(sourceFilePath, options))(variables)
  );

module.exports = {
  jsSourceTemplate,
  jsFileTemplate,
  writeJsFileUsingTemplate,
};
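Editor's note: a sketch of rendering a stub into a new source file with this wrapper. The file names and the lodash `variable: 'd'` option are assumptions for illustration, not taken from the diff.

const { writeJsFileUsingTemplate } = require('./template');

// Render ./stub/js.stub into a new seed file. Stubs without '<%= %>'
// placeholders (like the seed stubs above) are copied through verbatim,
// and any `${}` template literals inside them survive untouched.
writeJsFileUsingTemplate(
  './seeds/add_default_rows.js', // hypothetical target path
  './stub/js.stub',              // source stub
  { variable: 'd' },             // expose variables as properties of `d`
  { d: {} }                      // variables handed to the compiled template
).then(() => console.log('seed file written'));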
14
backend/apis/nodejs/node_modules/knex/lib/migrations/util/timestamp.js
generated
vendored
Normal file
@ -0,0 +1,14 @@
function yyyymmddhhmmss() {
  const now = new Date();

  return (
    now.getUTCFullYear().toString() +
    (now.getUTCMonth() + 1).toString().padStart(2, '0') +
    now.getUTCDate().toString().padStart(2, '0') +
    now.getUTCHours().toString().padStart(2, '0') +
    now.getUTCMinutes().toString().padStart(2, '0') +
    now.getUTCSeconds().toString().padStart(2, '0')
  );
}

module.exports = { yyyymmddhhmmss };
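Editor's note: the zero-padded UTC timestamp doubles as a lexicographically sortable filename prefix, which is how generated migrations keep their creation order. A quick sketch (the file name is illustrative):

const { yyyymmddhhmmss } = require('./timestamp');

// e.g. '20240105090703_create_people.js' (depends on the current UTC time)
console.log(`${yyyymmddhhmmss()}_create_people.js`);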