Mirror of https://github.com/xfarrow/blink, synced 2025-06-27 09:03:02 +02:00
Change endpoint from persons to people
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/execution/pg-transaction.js (generated, vendored, new file, 19 lines added)
@@ -0,0 +1,19 @@
const Transaction = require('../../../execution/transaction');

class Transaction_PG extends Transaction {
  begin(conn) {
    const trxMode = [
      this.isolationLevel ? `ISOLATION LEVEL ${this.isolationLevel}` : '',
      this.readOnly ? 'READ ONLY' : '',
    ]
      .join(' ')
      .trim();

    if (trxMode.length === 0) {
      return this.query(conn, 'BEGIN;');
    }
    return this.query(conn, `BEGIN TRANSACTION ${trxMode};`);
  }
}

module.exports = Transaction_PG;
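
For reference, a minimal standalone sketch (not part of knex) of how `begin` composes its BEGIN statement; the two parameters stand in for the `isolationLevel` and `readOnly` instance fields used above.

// Mirror Transaction_PG.begin: join the optional mode fragments and trim.
function beginSql(isolationLevel, readOnly) {
  const trxMode = [
    isolationLevel ? `ISOLATION LEVEL ${isolationLevel}` : '',
    readOnly ? 'READ ONLY' : '',
  ]
    .join(' ')
    .trim();
  return trxMode.length === 0 ? 'BEGIN;' : `BEGIN TRANSACTION ${trxMode};`;
}

console.log(beginSql(null, false)); // BEGIN;
console.log(beginSql('serializable', true)); // BEGIN TRANSACTION ISOLATION LEVEL serializable READ ONLY;
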
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/index.js (generated, vendored, new file, 361 lines added)
@@ -0,0 +1,361 @@
// PostgreSQL
// -------
const extend = require('lodash/extend');
const map = require('lodash/map');
const { promisify } = require('util');
const Client = require('../../client');

const Transaction = require('./execution/pg-transaction');
const QueryCompiler = require('./query/pg-querycompiler');
const QueryBuilder = require('./query/pg-querybuilder');
const ColumnCompiler = require('./schema/pg-columncompiler');
const TableCompiler = require('./schema/pg-tablecompiler');
const ViewCompiler = require('./schema/pg-viewcompiler');
const ViewBuilder = require('./schema/pg-viewbuilder');
const SchemaCompiler = require('./schema/pg-compiler');
const { makeEscape } = require('../../util/string');
const { isString } = require('../../util/is');

class Client_PG extends Client {
  constructor(config) {
    super(config);
    if (config.returning) {
      this.defaultReturning = config.returning;
    }

    if (config.searchPath) {
      this.searchPath = config.searchPath;
    }
  }
  transaction() {
    return new Transaction(this, ...arguments);
  }

  queryBuilder() {
    return new QueryBuilder(this);
  }

  queryCompiler(builder, formatter) {
    return new QueryCompiler(this, builder, formatter);
  }

  columnCompiler() {
    return new ColumnCompiler(this, ...arguments);
  }

  schemaCompiler() {
    return new SchemaCompiler(this, ...arguments);
  }

  tableCompiler() {
    return new TableCompiler(this, ...arguments);
  }

  viewCompiler() {
    return new ViewCompiler(this, ...arguments);
  }

  viewBuilder() {
    return new ViewBuilder(this, ...arguments);
  }

  _driver() {
    return require('pg');
  }

  wrapIdentifierImpl(value) {
    if (value === '*') return value;

    let arrayAccessor = '';
    const arrayAccessorMatch = value.match(/(.*?)(\[[0-9]+\])/);

    if (arrayAccessorMatch) {
      value = arrayAccessorMatch[1];
      arrayAccessor = arrayAccessorMatch[2];
    }

    return `"${value.replace(/"/g, '""')}"${arrayAccessor}`;
  }

  _acquireOnlyConnection() {
    const connection = new this.driver.Client(this.connectionSettings);

    connection.on('error', (err) => {
      connection.__knex__disposed = err;
    });

    connection.on('end', (err) => {
      connection.__knex__disposed = err || 'Connection ended unexpectedly';
    });

    return connection.connect().then(() => connection);
  }

  // Get a raw connection, called by the `pool` whenever a new
  // connection needs to be added to the pool.
  acquireRawConnection() {
    const client = this;

    return this._acquireOnlyConnection()
      .then(function (connection) {
        if (!client.version) {
          return client.checkVersion(connection).then(function (version) {
            client.version = version;
            return connection;
          });
        }

        return connection;
      })
      .then(async function setSearchPath(connection) {
        await client.setSchemaSearchPath(connection);
        return connection;
      });
  }

  // Used to explicitly close a connection, called internally by the pool
  // when a connection times out or the pool is shutdown.
  async destroyRawConnection(connection) {
    const end = promisify((cb) => connection.end(cb));
    return end();
  }

  // In PostgreSQL, we need to do a version check to do some feature
  // checking on the database.
  checkVersion(connection) {
    return new Promise((resolve, reject) => {
      connection.query('select version();', (err, resp) => {
        if (err) return reject(err);
        resolve(this._parseVersion(resp.rows[0].version));
      });
    });
  }

  _parseVersion(versionString) {
    return /^PostgreSQL (.*?)( |$)/.exec(versionString)[1];
  }

  // Position the bindings for the query. The escape sequence for question mark
  // is \? (e.g. knex.raw("\\?") since javascript requires '\' to be escaped too...)
  positionBindings(sql) {
    let questionCount = 0;
    return sql.replace(/(\\*)(\?)/g, function (match, escapes) {
      if (escapes.length % 2) {
        return '?';
      } else {
        questionCount++;
        return `$${questionCount}`;
      }
    });
  }

  setSchemaSearchPath(connection, searchPath) {
    let path = searchPath || this.searchPath;

    if (!path) return Promise.resolve(true);

    if (!Array.isArray(path) && !isString(path)) {
      throw new TypeError(
        `knex: Expected searchPath to be Array/String, got: ${typeof path}`
      );
    }

    if (isString(path)) {
      if (path.includes(',')) {
        const parts = path.split(',');
        const arraySyntax = `[${parts
          .map((searchPath) => `'${searchPath}'`)
          .join(', ')}]`;
        this.logger.warn(
          `Detected comma in searchPath "${path}".` +
            `If you are trying to specify multiple schemas, use Array syntax: ${arraySyntax}`
        );
      }
      path = [path];
    }

    path = path.map((schemaName) => `"${schemaName}"`).join(',');

    return new Promise(function (resolver, rejecter) {
      connection.query(`set search_path to ${path}`, function (err) {
        if (err) return rejecter(err);
        resolver(true);
      });
    });
  }

  _stream(connection, obj, stream, options) {
    if (!obj.sql) throw new Error('The query is empty');

    const PGQueryStream = process.browser
      ? undefined
      : require('pg-query-stream');
    const sql = obj.sql;

    return new Promise(function (resolver, rejecter) {
      const queryStream = connection.query(
        new PGQueryStream(sql, obj.bindings, options),
        (err) => {
          rejecter(err);
        }
      );

      queryStream.on('error', function (error) {
        rejecter(error);
        stream.emit('error', error);
      });

      // 'end' IS propagated by .pipe, by default
      stream.on('end', resolver);
      queryStream.pipe(stream);
    });
  }

  // Runs the query on the specified connection, providing the bindings
  // and any other necessary prep work.
  _query(connection, obj) {
    if (!obj.sql) throw new Error('The query is empty');

    let queryConfig = {
      text: obj.sql,
      values: obj.bindings || [],
    };

    if (obj.options) {
      queryConfig = extend(queryConfig, obj.options);
    }

    return new Promise(function (resolver, rejecter) {
      connection.query(queryConfig, function (err, response) {
        if (err) return rejecter(err);
        obj.response = response;
        resolver(obj);
      });
    });
  }

  // Ensures the response is returned in the same format as other clients.
  processResponse(obj, runner) {
    const resp = obj.response;
    if (obj.output) return obj.output.call(runner, resp);
    if (obj.method === 'raw') return resp;
    const { returning } = obj;
    if (resp.command === 'SELECT') {
      if (obj.method === 'first') return resp.rows[0];
      if (obj.method === 'pluck') return map(resp.rows, obj.pluck);
      return resp.rows;
    }
    if (returning) {
      const returns = [];
      for (let i = 0, l = resp.rows.length; i < l; i++) {
        const row = resp.rows[i];
        returns[i] = row;
      }
      return returns;
    }
    if (resp.command === 'UPDATE' || resp.command === 'DELETE') {
      return resp.rowCount;
    }
    return resp;
  }

  async cancelQuery(connectionToKill) {
    const conn = await this.acquireRawConnection();

    try {
      return await this._wrappedCancelQueryCall(conn, connectionToKill);
    } finally {
      await this.destroyRawConnection(conn).catch((err) => {
        this.logger.warn(`Connection Error: ${err}`);
      });
    }
  }
  _wrappedCancelQueryCall(conn, connectionToKill) {
    return this._query(conn, {
      sql: 'SELECT pg_cancel_backend($1);',
      bindings: [connectionToKill.processID],
      options: {},
    });
  }

  toPathForJson(jsonPath) {
    const PG_PATH_REGEX = /^{.*}$/;
    if (jsonPath.match(PG_PATH_REGEX)) {
      return jsonPath;
    }
    return (
      '{' +
      jsonPath
        .replace(/^(\$\.)/, '') // remove the first dollar
        .replace('.', ',')
        .replace(/\[([0-9]+)]/, ',$1') + // transform [number] to ,number
      '}'
    );
  }
}

Object.assign(Client_PG.prototype, {
  dialect: 'postgresql',

  driverName: 'pg',
  canCancelQuery: true,

  _escapeBinding: makeEscape({
    escapeArray(val, esc) {
      return esc(arrayString(val, esc));
    },
    escapeString(str) {
      let hasBackslash = false;
      let escaped = "'";
      for (let i = 0; i < str.length; i++) {
        const c = str[i];
        if (c === "'") {
          escaped += c + c;
        } else if (c === '\\') {
          escaped += c + c;
          hasBackslash = true;
        } else {
          escaped += c;
        }
      }
      escaped += "'";
      if (hasBackslash === true) {
        escaped = 'E' + escaped;
      }
      return escaped;
    },
    escapeObject(val, prepareValue, timezone, seen = []) {
      if (val && typeof val.toPostgres === 'function') {
        seen = seen || [];
        if (seen.indexOf(val) !== -1) {
          throw new Error(
            `circular reference detected while preparing "${val}" for query`
          );
        }
        seen.push(val);
        return prepareValue(val.toPostgres(prepareValue), seen);
      }
      return JSON.stringify(val);
    },
  }),
});

function arrayString(arr, esc) {
  let result = '{';
  for (let i = 0; i < arr.length; i++) {
    if (i > 0) result += ',';
    const val = arr[i];
    if (val === null || typeof val === 'undefined') {
      result += 'NULL';
    } else if (Array.isArray(val)) {
      result += arrayString(val, esc);
    } else if (typeof val === 'number') {
      result += val;
    } else {
      result += JSON.stringify(typeof val === 'string' ? val : esc(val));
    }
  }
  return result + '}';
}

module.exports = Client_PG;
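
The binding positioning above is the piece most likely to surprise: knex's generic `?` placeholders are rewritten to PostgreSQL's `$1, $2, ...`, while a backslash-escaped `\?` survives as a literal question mark. A standalone copy of that function (same body, no knex dependencies) behaves like this:

function positionBindings(sql) {
  let questionCount = 0;
  return sql.replace(/(\\*)(\?)/g, function (match, escapes) {
    if (escapes.length % 2) {
      return '?'; // odd number of backslashes: keep a literal '?'
    }
    questionCount++;
    return `$${questionCount}`;
  });
}

console.log(positionBindings('select * from people where id = ? and name = ?'));
// select * from people where id = $1 and name = $2
console.log(positionBindings("select * from people where note like '%\\?%'"));
// select * from people where note like '%?%'
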
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/query/pg-querybuilder.js (generated, vendored, new file, 43 lines added)
@@ -0,0 +1,43 @@
const QueryBuilder = require('../../../query/querybuilder.js');

module.exports = class QueryBuilder_PostgreSQL extends QueryBuilder {
  updateFrom(name) {
    this._single.updateFrom = name;
    return this;
  }

  using(tables) {
    this._single.using = tables;
    return this;
  }

  withMaterialized(alias, statementOrColumnList, nothingOrStatement) {
    this._validateWithArgs(
      alias,
      statementOrColumnList,
      nothingOrStatement,
      'with'
    );
    return this.withWrapped(
      alias,
      statementOrColumnList,
      nothingOrStatement,
      true
    );
  }

  withNotMaterialized(alias, statementOrColumnList, nothingOrStatement) {
    this._validateWithArgs(
      alias,
      statementOrColumnList,
      nothingOrStatement,
      'with'
    );
    return this.withWrapped(
      alias,
      statementOrColumnList,
      nothingOrStatement,
      false
    );
  }
};
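
These PostgreSQL-specific builder methods generate no SQL themselves; each one records its argument on the builder's `_single` bag and returns `this` for chaining, and the PG query compiler below reads those fields when compiling. A toy class (hypothetical, not knex's) showing the same pattern:

class FakePgBuilder {
  constructor() {
    this._single = {};
  }
  updateFrom(name) {
    this._single.updateFrom = name;
    return this;
  }
  using(tables) {
    this._single.using = tables;
    return this;
  }
}

const b = new FakePgBuilder().updateFrom('customers').using(['addresses']);
console.log(b._single); // { updateFrom: 'customers', using: [ 'addresses' ] }
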
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/query/pg-querycompiler.js (generated, vendored, new file, 400 lines added)
@@ -0,0 +1,400 @@
// PostgreSQL Query Builder & Compiler
// ------
const identity = require('lodash/identity');
const reduce = require('lodash/reduce');

const QueryCompiler = require('../../../query/querycompiler');
const {
  wrapString,
  columnize: columnize_,
  operator: operator_,
  wrap: wrap_,
} = require('../../../formatter/wrappingFormatter');

class QueryCompiler_PG extends QueryCompiler {
  constructor(client, builder, formatter) {
    super(client, builder, formatter);
    this._defaultInsertValue = 'default';
  }

  // Compiles a truncate query.
  truncate() {
    return `truncate ${this.tableName} restart identity`;
  }

  // is used if the an array with multiple empty values supplied

  // Compiles an `insert` query, allowing for multiple
  // inserts using a single query statement.
  insert() {
    let sql = super.insert();
    if (sql === '') return sql;

    const { returning, onConflict, ignore, merge, insert } = this.single;
    if (onConflict && ignore) sql += this._ignore(onConflict);
    if (onConflict && merge) {
      sql += this._merge(merge.updates, onConflict, insert);
      const wheres = this.where();
      if (wheres) sql += ` ${wheres}`;
    }
    if (returning) sql += this._returning(returning);

    return {
      sql,
      returning,
    };
  }

  // Compiles an `update` query, allowing for a return value.
  update() {
    const withSQL = this.with();
    const updateData = this._prepUpdate(this.single.update);
    const wheres = this.where();
    const { returning, updateFrom } = this.single;
    return {
      sql:
        withSQL +
        `update ${this.single.only ? 'only ' : ''}${this.tableName} ` +
        `set ${updateData.join(', ')}` +
        this._updateFrom(updateFrom) +
        (wheres ? ` ${wheres}` : '') +
        this._returning(returning),
      returning,
    };
  }

  using() {
    const usingTables = this.single.using;
    if (!usingTables) return;
    let sql = 'using ';
    if (Array.isArray(usingTables)) {
      sql += usingTables
        .map((table) => {
          return this.formatter.wrap(table);
        })
        .join(',');
    } else {
      sql += this.formatter.wrap(usingTables);
    }
    return sql;
  }

  // Compiles an `delete` query, allowing for a return value.
  del() {
    // Make sure tableName is processed by the formatter first.
    const { tableName } = this;
    const withSQL = this.with();
    let wheres = this.where() || '';
    let using = this.using() || '';
    const joins = this.grouped.join;

    const tableJoins = [];
    if (Array.isArray(joins)) {
      for (const join of joins) {
        tableJoins.push(
          wrap_(
            this._joinTable(join),
            undefined,
            this.builder,
            this.client,
            this.bindingsHolder
          )
        );

        const joinWheres = [];
        for (const clause of join.clauses) {
          joinWheres.push(
            this.whereBasic({
              column: clause.column,
              operator: '=',
              value: clause.value,
              asColumn: true,
            })
          );
        }
        if (joinWheres.length > 0) {
          wheres += (wheres ? ' and ' : 'where ') + joinWheres.join(' and ');
        }
      }
      if (tableJoins.length > 0) {
        using += (using ? ',' : 'using ') + tableJoins.join(',');
      }
    }

    // With 'using' syntax, no tablename between DELETE and FROM.
    const sql =
      withSQL +
      `delete from ${this.single.only ? 'only ' : ''}${tableName}` +
      (using ? ` ${using}` : '') +
      (wheres ? ` ${wheres}` : '');
    const { returning } = this.single;
    return {
      sql: sql + this._returning(returning),
      returning,
    };
  }

  aggregate(stmt) {
    return this._aggregate(stmt, { distinctParentheses: true });
  }

  _returning(value) {
    return value ? ` returning ${this.formatter.columnize(value)}` : '';
  }

  _updateFrom(name) {
    return name ? ` from ${this.formatter.wrap(name)}` : '';
  }

  _ignore(columns) {
    if (columns === true) {
      return ' on conflict do nothing';
    }
    return ` on conflict ${this._onConflictClause(columns)} do nothing`;
  }

  _merge(updates, columns, insert) {
    let sql = ` on conflict ${this._onConflictClause(columns)} do update set `;
    if (updates && Array.isArray(updates)) {
      sql += updates
        .map((column) =>
          wrapString(
            column.split('.').pop(),
            this.formatter.builder,
            this.client,
            this.formatter
          )
        )
        .map((column) => `${column} = excluded.${column}`)
        .join(', ');

      return sql;
    } else if (updates && typeof updates === 'object') {
      const updateData = this._prepUpdate(updates);
      if (typeof updateData === 'string') {
        sql += updateData;
      } else {
        sql += updateData.join(',');
      }

      return sql;
    } else {
      const insertData = this._prepInsert(insert);
      if (typeof insertData === 'string') {
        throw new Error(
          'If using merge with a raw insert query, then updates must be provided'
        );
      }

      sql += insertData.columns
        .map((column) =>
          wrapString(column.split('.').pop(), this.builder, this.client)
        )
        .map((column) => `${column} = excluded.${column}`)
        .join(', ');

      return sql;
    }
  }

  // Join array of table names and apply default schema.
  _tableNames(tables) {
    const schemaName = this.single.schema;
    const sql = [];

    for (let i = 0; i < tables.length; i++) {
      let tableName = tables[i];

      if (tableName) {
        if (schemaName) {
          tableName = `${schemaName}.${tableName}`;
        }
        sql.push(this.formatter.wrap(tableName));
      }
    }

    return sql.join(', ');
  }

  _lockingClause(lockMode) {
    const tables = this.single.lockTables || [];

    return lockMode + (tables.length ? ' of ' + this._tableNames(tables) : '');
  }

  _groupOrder(item, type) {
    return super._groupOrderNulls(item, type);
  }

  forUpdate() {
    return this._lockingClause('for update');
  }

  forShare() {
    return this._lockingClause('for share');
  }

  forNoKeyUpdate() {
    return this._lockingClause('for no key update');
  }

  forKeyShare() {
    return this._lockingClause('for key share');
  }

  skipLocked() {
    return 'skip locked';
  }

  noWait() {
    return 'nowait';
  }

  // Compiles a columnInfo query
  columnInfo() {
    const column = this.single.columnInfo;
    let schema = this.single.schema;

    // The user may have specified a custom wrapIdentifier function in the config. We
    // need to run the identifiers through that function, but not format them as
    // identifiers otherwise.
    const table = this.client.customWrapIdentifier(this.single.table, identity);

    if (schema) {
      schema = this.client.customWrapIdentifier(schema, identity);
    }

    const sql =
      'select * from information_schema.columns where table_name = ? and table_catalog = current_database()';
    const bindings = [table];

    return this._buildColumnInfoQuery(schema, sql, bindings, column);
  }

  _buildColumnInfoQuery(schema, sql, bindings, column) {
    if (schema) {
      sql += ' and table_schema = ?';
      bindings.push(schema);
    } else {
      sql += ' and table_schema = current_schema()';
    }

    return {
      sql,
      bindings,
      output(resp) {
        const out = reduce(
          resp.rows,
          function (columns, val) {
            columns[val.column_name] = {
              type: val.data_type,
              maxLength: val.character_maximum_length,
              nullable: val.is_nullable === 'YES',
              defaultValue: val.column_default,
            };
            return columns;
          },
          {}
        );
        return (column && out[column]) || out;
      },
    };
  }

  distinctOn(value) {
    return 'distinct on (' + this.formatter.columnize(value) + ') ';
  }

  // Json functions
  jsonExtract(params) {
    return this._jsonExtract('jsonb_path_query', params);
  }

  jsonSet(params) {
    return this._jsonSet(
      'jsonb_set',
      Object.assign({}, params, {
        path: this.client.toPathForJson(params.path),
      })
    );
  }

  jsonInsert(params) {
    return this._jsonSet(
      'jsonb_insert',
      Object.assign({}, params, {
        path: this.client.toPathForJson(params.path),
      })
    );
  }

  jsonRemove(params) {
    const jsonCol = `${columnize_(
      params.column,
      this.builder,
      this.client,
      this.bindingsHolder
    )} #- ${this.client.parameter(
      this.client.toPathForJson(params.path),
      this.builder,
      this.bindingsHolder
    )}`;
    return params.alias
      ? this.client.alias(jsonCol, this.formatter.wrap(params.alias))
      : jsonCol;
  }

  whereJsonPath(statement) {
    let castValue = '';
    if (!isNaN(statement.value) && parseInt(statement.value)) {
      castValue = '::int';
    } else if (!isNaN(statement.value) && parseFloat(statement.value)) {
      castValue = '::float';
    } else {
      castValue = " #>> '{}'";
    }
    return `jsonb_path_query_first(${this._columnClause(
      statement
    )}, ${this.client.parameter(
      statement.jsonPath,
      this.builder,
      this.bindingsHolder
    )})${castValue} ${operator_(
      statement.operator,
      this.builder,
      this.client,
      this.bindingsHolder
    )} ${this._jsonValueClause(statement)}`;
  }

  whereJsonSupersetOf(statement) {
    return this._not(
      statement,
      `${wrap_(
        statement.column,
        undefined,
        this.builder,
        this.client,
        this.bindingsHolder
      )} @> ${this._jsonValueClause(statement)}`
    );
  }

  whereJsonSubsetOf(statement) {
    return this._not(
      statement,
      `${columnize_(
        statement.column,
        this.builder,
        this.client,
        this.bindingsHolder
      )} <@ ${this._jsonValueClause(statement)}`
    );
  }

  onJsonPathEquals(clause) {
    return this._onJsonPathEquals('jsonb_path_query_first', clause);
  }
}

module.exports = QueryCompiler_PG;
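
The `_ignore`/`_merge` helpers above append PostgreSQL upsert clauses to the compiled INSERT. A simplified standalone sketch of the array-of-columns branch of `_merge` (identifier wrapping reduced to plain double quotes; the real code delegates to `_onConflictClause` and `wrapString`, which are not shown in this diff):

function onConflictMergeFragment(conflictColumns, updateColumns) {
  const target = `(${conflictColumns.map((c) => `"${c}"`).join(', ')})`;
  const assignments = updateColumns
    .map((column) => `"${column.split('.').pop()}"`)
    .map((column) => `${column} = excluded.${column}`)
    .join(', ');
  return ` on conflict ${target} do update set ${assignments}`;
}

console.log(onConflictMergeFragment(['id'], ['name', 'people.email']));
//  on conflict ("id") do update set "name" = excluded."name", "email" = excluded."email"
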
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/schema/pg-columncompiler.js (generated, vendored, new file, 156 lines added)
@@ -0,0 +1,156 @@
// PostgreSQL Column Compiler
// -------

const ColumnCompiler = require('../../../schema/columncompiler');
const { isObject } = require('../../../util/is');
const { toNumber } = require('../../../util/helpers');
const commentEscapeRegex = /(?<!')'(?!')/g;

class ColumnCompiler_PG extends ColumnCompiler {
  constructor(client, tableCompiler, columnBuilder) {
    super(client, tableCompiler, columnBuilder);
    this.modifiers = ['nullable', 'defaultTo', 'comment'];
    this._addCheckModifiers();
  }

  // Types
  // ------

  bit(column) {
    return column.length !== false ? `bit(${column.length})` : 'bit';
  }

  // Create the column definition for an enum type.
  // Using method "2" here: http://stackoverflow.com/a/10984951/525714
  enu(allowed, options) {
    options = options || {};

    const values =
      options.useNative && options.existingType
        ? undefined
        : allowed.join("', '");

    if (options.useNative) {
      let enumName = '';
      const schemaName = options.schemaName || this.tableCompiler.schemaNameRaw;

      if (schemaName) {
        enumName += `"${schemaName}".`;
      }

      enumName += `"${options.enumName}"`;

      if (!options.existingType) {
        this.tableCompiler.unshiftQuery(
          `create type ${enumName} as enum ('${values}')`
        );
      }

      return enumName;
    }
    return `text check (${this.formatter.wrap(this.args[0])} in ('${values}'))`;
  }

  decimal(precision, scale) {
    if (precision === null) return 'decimal';
    return `decimal(${toNumber(precision, 8)}, ${toNumber(scale, 2)})`;
  }

  json(jsonb) {
    if (jsonb) this.client.logger.deprecate('json(true)', 'jsonb()');
    return jsonColumn(this.client, jsonb);
  }

  jsonb() {
    return jsonColumn(this.client, true);
  }

  checkRegex(regex, constraintName) {
    return this._check(
      `${this.formatter.wrap(
        this.getColumnName()
      )} ~ ${this.client._escapeBinding(regex)}`,
      constraintName
    );
  }

  datetime(withoutTz = false, precision) {
    let useTz;
    if (isObject(withoutTz)) {
      ({ useTz, precision } = withoutTz);
    } else {
      useTz = !withoutTz;
    }
    useTz = typeof useTz === 'boolean' ? useTz : true;
    precision =
      precision !== undefined && precision !== null
        ? '(' + precision + ')'
        : '';

    return `${useTz ? 'timestamptz' : 'timestamp'}${precision}`;
  }

  timestamp(withoutTz = false, precision) {
    return this.datetime(withoutTz, precision);
  }

  // Modifiers:
  // ------
  comment(comment) {
    const columnName = this.args[0] || this.defaults('columnName');
    const escapedComment = comment
      ? `'${comment.replace(commentEscapeRegex, "''")}'`
      : 'NULL';

    this.pushAdditional(function () {
      this.pushQuery(
        `comment on column ${this.tableCompiler.tableName()}.` +
          this.formatter.wrap(columnName) +
          ` is ${escapedComment}`
      );
    }, comment);
  }

  increments(options = { primaryKey: true }) {
    return (
      'serial' +
      (this.tableCompiler._canBeAddPrimaryKey(options) ? ' primary key' : '')
    );
  }

  bigincrements(options = { primaryKey: true }) {
    return (
      'bigserial' +
      (this.tableCompiler._canBeAddPrimaryKey(options) ? ' primary key' : '')
    );
  }

  uuid(options = { primaryKey: false }) {
    return (
      'uuid' +
      (this.tableCompiler._canBeAddPrimaryKey(options) ? ' primary key' : '')
    );
  }
}

ColumnCompiler_PG.prototype.bigint = 'bigint';
ColumnCompiler_PG.prototype.binary = 'bytea';
ColumnCompiler_PG.prototype.bool = 'boolean';
ColumnCompiler_PG.prototype.double = 'double precision';
ColumnCompiler_PG.prototype.floating = 'real';
ColumnCompiler_PG.prototype.smallint = 'smallint';
ColumnCompiler_PG.prototype.tinyint = 'smallint';

function jsonColumn(client, jsonb) {
  if (
    !client.version ||
    client.config.client === 'cockroachdb' ||
    client.config.jsonbSupport === true ||
    parseFloat(client.version) >= 9.2
  ) {
    return jsonb ? 'jsonb' : 'json';
  }
  return 'text';
}

module.exports = ColumnCompiler_PG;
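
Of the type helpers above, `datetime`/`timestamp` carry the most logic: they accept either a boolean `withoutTz` or an options object, default to the timezone-aware type, and append an optional precision. A standalone sketch with the same branching (knex's `isObject` replaced by a plain typeof check):

function pgDatetimeType(withoutTz = false, precision) {
  let useTz;
  if (withoutTz !== null && typeof withoutTz === 'object') {
    ({ useTz, precision } = withoutTz);
  } else {
    useTz = !withoutTz;
  }
  useTz = typeof useTz === 'boolean' ? useTz : true;
  const suffix =
    precision !== undefined && precision !== null ? `(${precision})` : '';
  return `${useTz ? 'timestamptz' : 'timestamp'}${suffix}`;
}

console.log(pgDatetimeType()); // timestamptz
console.log(pgDatetimeType(true, 3)); // timestamp(3)
console.log(pgDatetimeType({ useTz: false, precision: 6 })); // timestamp(6)
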
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/schema/pg-compiler.js (generated, vendored, new file, 138 lines added)
@@ -0,0 +1,138 @@
// PostgreSQL Schema Compiler
// -------

const SchemaCompiler = require('../../../schema/compiler');

class SchemaCompiler_PG extends SchemaCompiler {
  constructor(client, builder) {
    super(client, builder);
  }

  // Check whether the current table
  hasTable(tableName) {
    let sql = 'select * from information_schema.tables where table_name = ?';
    const bindings = [tableName];

    if (this.schema) {
      sql += ' and table_schema = ?';
      bindings.push(this.schema);
    } else {
      sql += ' and table_schema = current_schema()';
    }

    this.pushQuery({
      sql,
      bindings,
      output(resp) {
        return resp.rows.length > 0;
      },
    });
  }

  // Compile the query to determine if a column exists in a table.
  hasColumn(tableName, columnName) {
    let sql =
      'select * from information_schema.columns where table_name = ? and column_name = ?';
    const bindings = [tableName, columnName];

    if (this.schema) {
      sql += ' and table_schema = ?';
      bindings.push(this.schema);
    } else {
      sql += ' and table_schema = current_schema()';
    }

    this.pushQuery({
      sql,
      bindings,
      output(resp) {
        return resp.rows.length > 0;
      },
    });
  }

  qualifiedTableName(tableName) {
    const name = this.schema ? `${this.schema}.${tableName}` : tableName;
    return this.formatter.wrap(name);
  }

  // Compile a rename table command.
  renameTable(from, to) {
    this.pushQuery(
      `alter table ${this.qualifiedTableName(
        from
      )} rename to ${this.formatter.wrap(to)}`
    );
  }

  createSchema(schemaName) {
    this.pushQuery(`create schema ${this.formatter.wrap(schemaName)}`);
  }

  createSchemaIfNotExists(schemaName) {
    this.pushQuery(
      `create schema if not exists ${this.formatter.wrap(schemaName)}`
    );
  }

  dropSchema(schemaName, cascade = false) {
    this.pushQuery(
      `drop schema ${this.formatter.wrap(schemaName)}${
        cascade ? ' cascade' : ''
      }`
    );
  }

  dropSchemaIfExists(schemaName, cascade = false) {
    this.pushQuery(
      `drop schema if exists ${this.formatter.wrap(schemaName)}${
        cascade ? ' cascade' : ''
      }`
    );
  }

  dropExtension(extensionName) {
    this.pushQuery(`drop extension ${this.formatter.wrap(extensionName)}`);
  }

  dropExtensionIfExists(extensionName) {
    this.pushQuery(
      `drop extension if exists ${this.formatter.wrap(extensionName)}`
    );
  }

  createExtension(extensionName) {
    this.pushQuery(`create extension ${this.formatter.wrap(extensionName)}`);
  }

  createExtensionIfNotExists(extensionName) {
    this.pushQuery(
      `create extension if not exists ${this.formatter.wrap(extensionName)}`
    );
  }

  renameView(from, to) {
    this.pushQuery(
      this.alterViewPrefix +
        `${this.formatter.wrap(from)} rename to ${this.formatter.wrap(to)}`
    );
  }

  refreshMaterializedView(viewName, concurrently = false) {
    this.pushQuery({
      sql: `refresh materialized view${
        concurrently ? ' concurrently' : ''
      } ${this.formatter.wrap(viewName)}`,
    });
  }

  dropMaterializedView(viewName) {
    this._dropView(viewName, false, true);
  }

  dropMaterializedViewIfExists(viewName) {
    this._dropView(viewName, true, true);
  }
}

module.exports = SchemaCompiler_PG;
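
Both existence checks above build the same shape of query: select from information_schema, bind the name(s), and scope to either an explicit schema binding or current_schema(). A standalone sketch of the hasTable variant, returning the sql/bindings pair plus the boolean conversion that the output callback applies to the response:

function hasTableQuery(tableName, schema) {
  let sql = 'select * from information_schema.tables where table_name = ?';
  const bindings = [tableName];
  if (schema) {
    sql += ' and table_schema = ?';
    bindings.push(schema);
  } else {
    sql += ' and table_schema = current_schema()';
  }
  return { sql, bindings, toBoolean: (resp) => resp.rows.length > 0 };
}

console.log(hasTableQuery('people'));
// { sql: 'select * from information_schema.tables where table_name = ? and table_schema = current_schema()',
//   bindings: [ 'people' ], toBoolean: [Function: toBoolean] }
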
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/schema/pg-tablecompiler.js (generated, vendored, new file, 304 lines added)
@@ -0,0 +1,304 @@
/* eslint max-len: 0 */

// PostgreSQL Table Builder & Compiler
// -------

const has = require('lodash/has');
const TableCompiler = require('../../../schema/tablecompiler');
const { isObject, isString } = require('../../../util/is');

class TableCompiler_PG extends TableCompiler {
  constructor(client, tableBuilder) {
    super(client, tableBuilder);
  }

  // Compile a rename column command.
  renameColumn(from, to) {
    return this.pushQuery({
      sql: `alter table ${this.tableName()} rename ${this.formatter.wrap(
        from
      )} to ${this.formatter.wrap(to)}`,
    });
  }

  _setNullableState(column, isNullable) {
    const constraintAction = isNullable ? 'drop not null' : 'set not null';
    const sql = `alter table ${this.tableName()} alter column ${this.formatter.wrap(
      column
    )} ${constraintAction}`;
    return this.pushQuery({
      sql: sql,
    });
  }

  compileAdd(builder) {
    const table = this.formatter.wrap(builder);
    const columns = this.prefixArray('add column', this.getColumns(builder));
    return this.pushQuery({
      sql: `alter table ${table} ${columns.join(', ')}`,
    });
  }

  // Adds the "create" query to the query sequence.
  createQuery(columns, ifNot, like) {
    const createStatement = ifNot
      ? 'create table if not exists '
      : 'create table ';
    const columnsSql = ` (${columns.sql.join(', ')}${
      this.primaryKeys() || ''
    }${this._addChecks()})`;

    let sql =
      createStatement +
      this.tableName() +
      (like && this.tableNameLike()
        ? ' (like ' +
          this.tableNameLike() +
          ' including all' +
          (columns.sql.length ? ', ' + columns.sql.join(', ') : '') +
          ')'
        : columnsSql);
    if (this.single.inherits)
      sql += ` inherits (${this.formatter.wrap(this.single.inherits)})`;
    this.pushQuery({
      sql,
      bindings: columns.bindings,
    });
    const hasComment = has(this.single, 'comment');
    if (hasComment) this.comment(this.single.comment);
  }

  primaryKeys() {
    const pks = (this.grouped.alterTable || []).filter(
      (k) => k.method === 'primary'
    );
    if (pks.length > 0 && pks[0].args.length > 0) {
      const columns = pks[0].args[0];
      let constraintName = pks[0].args[1] || '';
      let deferrable;
      if (isObject(constraintName)) {
        ({ constraintName, deferrable } = constraintName);
      }
      deferrable = deferrable ? ` deferrable initially ${deferrable}` : '';
      constraintName = constraintName
        ? this.formatter.wrap(constraintName)
        : this.formatter.wrap(`${this.tableNameRaw}_pkey`);

      return `, constraint ${constraintName} primary key (${this.formatter.columnize(
        columns
      )})${deferrable}`;
    }
  }

  addColumns(columns, prefix, colCompilers) {
    if (prefix === this.alterColumnsPrefix) {
      // alter columns
      for (const col of colCompilers) {
        this._addColumn(col);
      }
    } else {
      // base class implementation for normal add
      super.addColumns(columns, prefix);
    }
  }

  _addColumn(col) {
    const quotedTableName = this.tableName();
    const type = col.getColumnType();
    // We'd prefer to call this.formatter.wrapAsIdentifier here instead, however the context passed to
    // `this` instance is not that of the column, but of the table. Thus, we unfortunately have to call
    // `wrapIdentifier` here as well (it is already called once on the initial column operation) to give
    // our `alter` operation the correct `queryContext`. Refer to issue #2606 and PR #2612.
    const colName = this.client.wrapIdentifier(
      col.getColumnName(),
      col.columnBuilder.queryContext()
    );

    // To alter enum columns they must be cast to text first
    const isEnum = col.type === 'enu';
    this.pushQuery({
      sql: `alter table ${quotedTableName} alter column ${colName} drop default`,
      bindings: [],
    });

    const alterNullable = col.columnBuilder.alterNullable;
    if (alterNullable) {
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} drop not null`,
        bindings: [],
      });
    }

    const alterType = col.columnBuilder.alterType;
    if (alterType) {
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} type ${type} using (${colName}${
          isEnum ? '::text::' : '::'
        }${type})`,
        bindings: [],
      });
    }

    const defaultTo = col.modified['defaultTo'];
    if (defaultTo) {
      const modifier = col.defaultTo.apply(col, defaultTo);
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} set ${modifier}`,
        bindings: [],
      });
    }

    if (alterNullable) {
      const nullable = col.modified['nullable'];
      if (nullable && nullable[0] === false) {
        this.pushQuery({
          sql: `alter table ${quotedTableName} alter column ${colName} set not null`,
          bindings: [],
        });
      }
    }
  }

  // Compiles the comment on the table.
  comment(comment) {
    this.pushQuery(
      `comment on table ${this.tableName()} is '${this.single.comment}'`
    );
  }

  // Indexes:
  // -------

  primary(columns, constraintName) {
    let deferrable;
    if (isObject(constraintName)) {
      ({ constraintName, deferrable } = constraintName);
    }
    deferrable = deferrable ? ` deferrable initially ${deferrable}` : '';
    constraintName = constraintName
      ? this.formatter.wrap(constraintName)
      : this.formatter.wrap(`${this.tableNameRaw}_pkey`);
    if (this.method !== 'create' && this.method !== 'createIfNot') {
      this.pushQuery(
        `alter table ${this.tableName()} add constraint ${constraintName} primary key (${this.formatter.columnize(
          columns
        )})${deferrable}`
      );
    }
  }

  unique(columns, indexName) {
    let deferrable;
    let useConstraint = true;
    let predicate;
    if (isObject(indexName)) {
      ({ indexName, deferrable, useConstraint, predicate } = indexName);
      if (useConstraint === undefined) {
        useConstraint = !!deferrable || !predicate;
      }
    }
    if (!useConstraint && deferrable && deferrable !== 'not deferrable') {
      throw new Error('postgres cannot create deferrable index');
    }
    if (useConstraint && predicate) {
      throw new Error('postgres cannot create constraint with predicate');
    }
    deferrable = deferrable ? ` deferrable initially ${deferrable}` : '';
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('unique', this.tableNameRaw, columns);

    if (useConstraint) {
      this.pushQuery(
        `alter table ${this.tableName()} add constraint ${indexName}` +
          ' unique (' +
          this.formatter.columnize(columns) +
          ')' +
          deferrable
      );
    } else {
      const predicateQuery = predicate
        ? ' ' + this.client.queryCompiler(predicate).where()
        : '';

      this.pushQuery(
        `create unique index ${indexName} on ${this.tableName()} (${this.formatter.columnize(
          columns
        )})${predicateQuery}`
      );
    }
  }

  index(columns, indexName, options) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('index', this.tableNameRaw, columns);

    let predicate;
    let storageEngineIndexType;
    let indexType;

    if (isString(options)) {
      storageEngineIndexType = options;
    } else if (isObject(options)) {
      ({ indexType, storageEngineIndexType, predicate } = options);
    }

    const predicateQuery = predicate
      ? ' ' + this.client.queryCompiler(predicate).where()
      : '';

    this.pushQuery(
      `create${
        typeof indexType === 'string' && indexType.toLowerCase() === 'unique'
          ? ' unique'
          : ''
      } index ${indexName} on ${this.tableName()}${
        (storageEngineIndexType && ` using ${storageEngineIndexType}`) || ''
      }` +
        ' (' +
        this.formatter.columnize(columns) +
        ')' +
        `${predicateQuery}`
    );
  }

  dropPrimary(constraintName) {
    constraintName = constraintName
      ? this.formatter.wrap(constraintName)
      : this.formatter.wrap(this.tableNameRaw + '_pkey');
    this.pushQuery(
      `alter table ${this.tableName()} drop constraint ${constraintName}`
    );
  }

  dropIndex(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('index', this.tableNameRaw, columns);
    indexName = this.schemaNameRaw
      ? `${this.formatter.wrap(this.schemaNameRaw)}.${indexName}`
      : indexName;
    this.pushQuery(`drop index ${indexName}`);
  }

  dropUnique(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('unique', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} drop constraint ${indexName}`
    );
  }

  dropForeign(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('foreign', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} drop constraint ${indexName}`
    );
  }
}

module.exports = TableCompiler_PG;
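
The `unique` method above chooses between two quite different statements: an ALTER TABLE ... ADD CONSTRAINT (which may be deferrable but cannot take a predicate) and a CREATE UNIQUE INDEX ... WHERE (the reverse). A standalone sketch of just that decision for the options-object form, with `predicate` reduced to a truthy flag instead of a knex query builder:

function uniqueStrategy({ deferrable, predicate, useConstraint } = {}) {
  if (useConstraint === undefined) {
    useConstraint = !!deferrable || !predicate;
  }
  if (!useConstraint && deferrable && deferrable !== 'not deferrable') {
    throw new Error('postgres cannot create deferrable index');
  }
  if (useConstraint && predicate) {
    throw new Error('postgres cannot create constraint with predicate');
  }
  return useConstraint
    ? 'alter table ... add constraint ... unique (...)'
    : 'create unique index ... where ...';
}

console.log(uniqueStrategy({ deferrable: 'deferred' })); // constraint path
console.log(uniqueStrategy({ predicate: true })); // partial-index path
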
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/schema/pg-viewbuilder.js (generated, vendored, new file, 21 lines added)
@@ -0,0 +1,21 @@
const ViewBuilder = require('../../../schema/viewbuilder.js');

class ViewBuilder_PG extends ViewBuilder {
  constructor() {
    super(...arguments);
  }

  checkOption() {
    this._single.checkOption = 'default_option';
  }

  localCheckOption() {
    this._single.checkOption = 'local';
  }

  cascadedCheckOption() {
    this._single.checkOption = 'cascaded';
  }
}

module.exports = ViewBuilder_PG;
backend/apis/nodejs/node_modules/knex/lib/dialects/postgres/schema/pg-viewcompiler.js (generated, vendored, new file, 35 lines added)
@@ -0,0 +1,35 @@
/* eslint max-len: 0 */

const ViewCompiler = require('../../../schema/viewcompiler.js');

class ViewCompiler_PG extends ViewCompiler {
  constructor(client, viewCompiler) {
    super(client, viewCompiler);
  }

  renameColumn(from, to) {
    return this.pushQuery({
      sql: `alter view ${this.viewName()} rename ${this.formatter.wrap(
        from
      )} to ${this.formatter.wrap(to)}`,
    });
  }

  defaultTo(column, defaultValue) {
    return this.pushQuery({
      sql: `alter view ${this.viewName()} alter ${this.formatter.wrap(
        column
      )} set default ${defaultValue}`,
    });
  }

  createOrReplace() {
    this.createQuery(this.columns, this.selectQuery, false, true);
  }

  createMaterializedView() {
    this.createQuery(this.columns, this.selectQuery, true);
  }
}

module.exports = ViewCompiler_PG;
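
The two ALTER VIEW helpers above interpolate the view name, column name, and default straight into SQL strings. A standalone sketch of the statements they produce, with `this.viewName()`/`this.formatter.wrap` approximated by simple double-quote wrapping:

const wrap = (id) => `"${String(id).replace(/"/g, '""')}"`;

function renameViewColumnSql(view, from, to) {
  return `alter view ${wrap(view)} rename ${wrap(from)} to ${wrap(to)}`;
}

function viewColumnDefaultSql(view, column, defaultValue) {
  return `alter view ${wrap(view)} alter ${wrap(column)} set default ${defaultValue}`;
}

console.log(renameViewColumnSql('people_view', 'person_name', 'name'));
// alter view "people_view" rename "person_name" to "name"
console.log(viewColumnDefaultSql('people_view', 'status', "'active'"));
// alter view "people_view" alter "status" set default 'active'
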