mirror of https://github.com/Fabio286/antares.git
synced 2025-02-17 04:00:48 +01:00

Merge pull request #208 from antares-sql/feat/postgre-import-export

feat(PostgreSQL): import/export

This commit is contained in:
commit 40828cb3ff
@@ -120,6 +120,7 @@
    "moment": "^2.29.1",
    "mysql2": "^2.3.2",
    "pg": "^8.7.1",
+   "pg-query-stream": "^4.2.3",
    "pgsql-ast-parser": "^7.2.1",
    "source-map-support": "^0.5.20",
    "spectre.css": "^0.5.9",
@@ -39,6 +39,7 @@ module.exports = {
   schemaEdit: false,
   schemaDrop: false,
   schemaExport: false,
+   exportByChunks: false,
   schemaImport: false,
   tableSettings: false,
   tableOptions: false,
@@ -35,6 +35,7 @@ module.exports = {
   schemaEdit: true,
   schemaDrop: true,
   schemaExport: true,
+   exportByChunks: true,
   schemaImport: true,
   tableSettings: true,
   viewSettings: true,
@@ -31,9 +31,9 @@ module.exports = {
   routineAdd: true,
   functionAdd: true,
   schemaDrop: true,
+   schemaExport: true,
+   schemaImport: true,
   databaseEdit: false,
-   schemaExport: false,
-   schemaImport: false,
   tableSettings: true,
   viewSettings: true,
   triggerSettings: true,
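These per-client capability maps are what the UI consults to decide which features to expose; the new exportByChunks flag gates the chunked-INSERT options added to the export modal further down. A minimal sketch of the lookup, assuming an aggregate 'common/customizations' module keyed by client id (the modal below reads customizations[this.currentWorkspace.client][feat] the same way):

// Illustrative sketch; the aggregate import is an assumption, the flag
// names come from the hunks above.
import customizations from 'common/customizations';

function canExportSchema (clientId) {
   return Boolean((customizations[clientId] || {}).schemaExport);
}

canExportSchema('pg'); // true after this change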
@@ -572,7 +572,7 @@ export class PostgreSQLClient extends AntaresCore {
    * @memberof PostgreSQLClient
    */
   async dropSchema (params) {
-      return await this.raw(`DROP SCHEMA "${params.database}"`);
+      return await this.raw(`DROP SCHEMA "${params.database}" CASCADE`);
   }

   /**
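Worth noting: without CASCADE, PostgreSQL refuses to drop a schema that still contains objects, so the previous statement failed on any non-empty schema. A minimal sketch of the difference using the pg driver directly (connection setup and the "demo" schema name are assumptions):

// Hypothetical illustration of the behavioral change above.
const { Client } = require('pg');

async function main () {
   const client = new Client(); // reads PG* env vars
   await client.connect();

   // Fails with "cannot drop schema demo because other objects depend on it"
   // while the schema still contains tables, views, types, etc.
   // await client.query('DROP SCHEMA "demo"');

   // Also drops every object contained in the schema.
   await client.query('DROP SCHEMA "demo" CASCADE');
   await client.end();
}

main().catch(console.error);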
@@ -6,6 +6,12 @@ import moment from 'moment';
import { lineString, point, polygon } from '@turf/helpers';

export default class MysqlExporter extends SqlExporter {
+   constructor (...args) {
+      super(...args);
+
+      this._commentChar = '#';
+   }
+
   async getSqlHeader () {
      let dump = await super.getSqlHeader();
      dump += `
src/main/libs/exporters/sql/PostgreSQLExporter.js (new file, 495 lines)
@@ -0,0 +1,495 @@
import { SqlExporter } from './SqlExporter';
import { BLOB, BIT, DATE, DATETIME, FLOAT, NUMBER, TEXT_SEARCH } from 'common/fieldTypes';
import hexToBinary from 'common/libs/hexToBinary';
import { getArrayDepth } from 'common/libs/getArrayDepth';
import moment from 'moment';
import { lineString, point, polygon } from '@turf/helpers';
import QueryStream from 'pg-query-stream';

export default class PostgreSQLExporter extends SqlExporter {
   async getSqlHeader () {
      let dump = await super.getSqlHeader();
      dump += `

SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;\n\n\n`;

      if (this.schemaName !== 'public') dump += `CREATE SCHEMA "${this.schemaName}";\n\n`;

      dump += await this.getCreateTypes();

      return dump;
   }

   async getCreateTable (tableName) {
      let createSql = '';
      const sequences = [];
      const columnsSql = [];
      const arrayTypes = {
         _int2: 'smallint',
         _int4: 'integer',
         _int8: 'bigint',
         _float4: 'real',
         _float8: 'double precision',
         _char: '"char"',
         _varchar: 'character varying'
      };

      // Table columns
      const { rows } = await this._client.raw(`
         SELECT *
         FROM "information_schema"."columns"
         WHERE "table_schema" = '${this.schemaName}'
         AND "table_name" = '${tableName}'
         ORDER BY "ordinal_position" ASC
      `, { schema: 'information_schema' });

      if (!rows.length) return '';

      for (const column of rows) {
         let fieldType = column.data_type;
         if (fieldType === 'USER-DEFINED') fieldType = `"${this.schemaName}".${column.udt_name}`;
         else if (fieldType === 'ARRAY') {
            // Array columns report data_type 'ARRAY'; the element type lives in udt_name
            if (Object.keys(arrayTypes).includes(column.udt_name))
               fieldType = arrayTypes[column.udt_name] + '[]';
            else
               fieldType = column.udt_name.replaceAll('_', '') + '[]';
         }

         const columnArr = [
            `"${column.column_name}"`,
            `${fieldType}${column.character_maximum_length ? `(${column.character_maximum_length})` : ''}`
         ];

         if (column.column_default) {
            columnArr.push(`DEFAULT ${column.column_default}`);
            if (column.column_default.includes('nextval')) {
               const sequenceName = column.column_default.split('\'')[1];
               sequences.push(sequenceName);
            }
         }

         if (column.is_nullable === 'NO') columnArr.push('NOT NULL');

         columnsSql.push(columnArr.join(' '));
      }

      // Table sequences
      for (let sequence of sequences) {
         if (sequence.includes('.')) sequence = sequence.split('.')[1];

         const { rows } = await this._client
            .select('*')
            .schema('information_schema')
            .from('sequences')
            .where({ sequence_schema: `= '${this.schemaName}'`, sequence_name: `= '${sequence}'` })
            .run();

         if (rows.length) {
            createSql += `CREATE SEQUENCE "${this.schemaName}"."${sequence}"
   START WITH ${rows[0].start_value}
   INCREMENT BY ${rows[0].increment}
   MINVALUE ${rows[0].minimum_value}
   MAXVALUE ${rows[0].maximum_value}
   CACHE 1;\n`;

            // createSql += `\nALTER TABLE "${sequence}" OWNER TO ${this._client._params.user};\n\n`;
         }
      }

      // Table create
      createSql += `\nCREATE TABLE "${this.schemaName}"."${tableName}"(
   ${columnsSql.join(',\n   ')}
);\n`;

      // createSql += `\nALTER TABLE "${tableName}" OWNER TO ${this._client._params.user};\n\n`;

      // Table indexes
      createSql += '\n';
      const { rows: indexes } = await this._client
         .select('*')
         .schema('pg_catalog')
         .from('pg_indexes')
         .where({ schemaname: `= '${this.schemaName}'`, tablename: `= '${tableName}'` })
         .run();

      for (const index of indexes)
         createSql += `${index.indexdef};\n`;

      // Table foreigns
      const { rows: foreigns } = await this._client.raw(`
         SELECT
            tc.table_schema,
            tc.constraint_name,
            tc.table_name,
            kcu.column_name,
            ccu.table_schema AS foreign_table_schema,
            ccu.table_name AS foreign_table_name,
            ccu.column_name AS foreign_column_name,
            rc.update_rule,
            rc.delete_rule
         FROM information_schema.table_constraints AS tc
         JOIN information_schema.key_column_usage AS kcu
            ON tc.constraint_name = kcu.constraint_name
            AND tc.table_schema = kcu.table_schema
         JOIN information_schema.constraint_column_usage AS ccu
            ON ccu.constraint_name = tc.constraint_name
            AND ccu.table_schema = tc.table_schema
         JOIN information_schema.referential_constraints AS rc
            ON rc.constraint_name = kcu.constraint_name
         WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_schema = '${this.schemaName}'
         AND tc.table_name = '${tableName}'
      `);

      // Foreign keys are buffered and emitted after all tables exist (see _postTablesSql)
      for (const foreign of foreigns) {
         this._postTablesSql += `\nALTER TABLE ONLY "${this.schemaName}"."${tableName}"
   ADD CONSTRAINT "${foreign.constraint_name}" FOREIGN KEY ("${foreign.column_name}") REFERENCES "${this.schemaName}"."${foreign.foreign_table_name}" ("${foreign.foreign_column_name}") ON UPDATE ${foreign.update_rule} ON DELETE ${foreign.delete_rule};\n`;
      }

      return createSql;
   }

   getDropTable (tableName) {
      return `DROP TABLE IF EXISTS "${this.schemaName}"."${tableName}";`;
   }

   async * getTableInsert (tableName) {
      let rowCount = 0;
      const sqlStr = '';

      const countResults = await this._client.raw(`SELECT COUNT(1) as count FROM "${this.schemaName}"."${tableName}"`);
      if (countResults.rows.length === 1) rowCount = countResults.rows[0].count;

      if (rowCount > 0) {
         const columns = await this._client.getTableColumns({
            table: tableName,
            schema: this.schemaName
         });

         const columnNames = columns.map(col => '"' + col.name + '"').join(', ');

         yield sqlStr;

         const stream = await this._queryStream(
            `SELECT ${columnNames} FROM "${this.schemaName}"."${tableName}"`
         );

         for await (const row of stream) {
            if (this.isCancelled) {
               stream.destroy();
               yield null;
               return;
            }

            let sqlInsertString = `INSERT INTO "${this.schemaName}"."${tableName}" (${columnNames}) VALUES`;

            sqlInsertString += ' (';

            for (const i in columns) {
               const column = columns[i];
               const val = row[column.name];

               if (val === null) sqlInsertString += 'NULL';
               else if (DATE.includes(column.type)) {
                  sqlInsertString += moment(val).isValid()
                     ? this.escapeAndQuote(moment(val).format('YYYY-MM-DD'))
                     : val;
               }
               else if (DATETIME.includes(column.type)) {
                  let datePrecision = '';
                  for (let i = 0; i < column.precision; i++)
                     datePrecision += i === 0 ? '.S' : 'S';

                  sqlInsertString += moment(val).isValid()
                     ? this.escapeAndQuote(moment(val).format(`YYYY-MM-DD HH:mm:ss${datePrecision}`))
                     : this.escapeAndQuote(val);
               }
               else if (column.isArray) {
                  let parsedVal;
                  if (Array.isArray(val))
                     parsedVal = JSON.stringify(val).replaceAll('[', '{').replaceAll(']', '}');
                  else
                     parsedVal = typeof val === 'string' ? val.replaceAll('[', '{').replaceAll(']', '}') : '';
                  sqlInsertString += `'${parsedVal}'`;
               }
               else if (TEXT_SEARCH.includes(column.type))
                  sqlInsertString += `'${val.replaceAll('\'', '\'\'')}'`;
               else if (BIT.includes(column.type))
                  sqlInsertString += `b'${hexToBinary(Buffer.from(val).toString('hex'))}'`;
               else if (BLOB.includes(column.type))
                  sqlInsertString += `decode('${val.toString('hex').toUpperCase()}', 'hex')`;
               else if (NUMBER.includes(column.type))
                  sqlInsertString += val;
               else if (FLOAT.includes(column.type))
                  sqlInsertString += parseFloat(val);
               else if (val === '') sqlInsertString += '\'\'';
               else {
                  sqlInsertString += typeof val === 'string'
                     ? this.escapeAndQuote(val)
                     : typeof val === 'object'
                        ? this.escapeAndQuote(JSON.stringify(val))
                        : val;
               }

               if (parseInt(i) !== columns.length - 1)
                  sqlInsertString += ', ';
            }

            sqlInsertString += ');\n';

            yield sqlInsertString;
         }

         yield sqlStr;
      }
   }

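   // Example of the emitted stream for a hypothetical table
   // "public"."books"(id integer, title varchar):
   //    INSERT INTO "public"."books" ("id", "title") VALUES (1, 'Dune');
   // One statement is yielded per row, so the caller can write the dump
   // incrementally instead of buffering the whole table in memory.
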
   async getCreateTypes () {
      let sqlString = '';
      const { rows: types } = await this._client.raw(`
         SELECT pg_type.typname, pg_enum.enumlabel
         FROM pg_type
         JOIN pg_enum ON pg_enum.enumtypid = pg_type.oid;
      `);

      if (types.length) { // TODO: refactor
         sqlString += this.buildComment('Dump of types\n------------------------------------------------------------') + '\n\n';

         const typesArr = types.reduce((arr, type) => {
            if (arr.every(el => el.name !== type.typname))
               arr.push({ name: type.typname, enums: [this.escapeAndQuote(type.enumlabel)] });
            else {
               const i = arr.findIndex(el => el.name === type.typname);
               arr[i].enums.push(this.escapeAndQuote(type.enumlabel));
            }

            return arr;
         }, []);

         for (const type of typesArr) {
            sqlString += `CREATE TYPE "${this.schemaName}"."${type.name}" AS ENUM (
   ${type.enums.join(',\n\t')}
);`;
         }

         // sqlString += `\nALTER TYPE "${tableName}" OWNER TO ${this._client._params.user};\n`
      }

      return sqlString;
   }

   async getCreateAggregates () {
      let sqlString = '';

      const { rows: aggregates } = await this._client.raw(`
         SELECT proname
         FROM pg_proc
         WHERE prokind = 'a'
         AND pronamespace::regnamespace::text = '${this.schemaName}'
         ORDER BY 1;
      `);

      if (aggregates.length) {
         for (const aggregate of aggregates) {
            const { rows: aggregateDef } = await this._client.raw(
               `SELECT
                  format(
                     E'CREATE AGGREGATE %s (\n%s\n);'
                     , (pg_identify_object('pg_proc'::regclass, aggfnoid, 0)).identity
                     , array_to_string(
                        ARRAY[
                           format(E'\tSFUNC = %s', aggtransfn::regproc)
                           , format(E'\tSTYPE = %s', format_type(aggtranstype, NULL))
                           , CASE aggfinalfn WHEN '-'::regproc THEN NULL ELSE format(E'\tFINALFUNC = %s', aggfinalfn::text) END
                           , CASE aggsortop WHEN 0 THEN NULL ELSE format(E'\tSORTOP = %s', oprname) END
                           , CASE WHEN agginitval IS NULL THEN NULL ELSE format(E'\tINITCOND = %s', agginitval) END
                        ]
                        , E',\n'
                     )
                  )
               FROM pg_aggregate
               LEFT JOIN pg_operator ON pg_operator.oid = aggsortop
               WHERE aggfnoid = '${this.schemaName}.${aggregate.proname}'::regproc;`
            );

            if (aggregateDef.length)
               sqlString += '\n\n' + aggregateDef[0].format;
         }
      }

      return sqlString + '\n\n\n';
   }

   async getViews () {
      const { rows: views } = await this._client.raw(`SELECT * FROM "pg_views" WHERE "schemaname"='${this.schemaName}'`);
      let sqlString = '';

      for (const view of views) {
         sqlString += `\nDROP VIEW IF EXISTS "${view.viewname}";\n`;

         // const { rows: columns } = await this._client
         //    .select('*')
         //    .schema('information_schema')
         //    .from('columns')
         //    .where({ table_schema: `= '${this.schemaName}'`, table_name: `= '${view.viewname}'` })
         //    .orderBy({ ordinal_position: 'ASC' })
         //    .run();

         // sqlString += `
         // CREATE VIEW "${this.schemaName}"."${view.viewname}" AS
         // SELECT
         //    ${columns.reduce((acc, curr) => {
         //       const fieldType = curr.data_type === 'USER-DEFINED' ? curr.udt_name : curr.data_type;
         //       acc.push(`NULL::${fieldType}${curr.character_maximum_length ? `(${curr.character_maximum_length})` : ''} AS "${curr.column_name}"`);
         //       return acc;
         //    }, []).join(',\n   ')};
         // `;

         sqlString += `\nCREATE OR REPLACE VIEW "${this.schemaName}"."${view.viewname}" AS \n${view.definition}\n`;
      }

      return sqlString;
   }

   async getTriggers () {
      let sqlString = '';

      // Trigger functions
      const { rows: triggerFunctions } = await this._client.raw(
         `SELECT DISTINCT routine_name AS name FROM information_schema.routines WHERE routine_type = 'FUNCTION' AND routine_schema = '${this.schemaName}' AND data_type = 'trigger'`
      );

      for (const func of triggerFunctions) {
         const { rows: functionDef } = await this._client.raw(
            `SELECT pg_get_functiondef((SELECT oid FROM pg_proc WHERE proname = '${func.name}')) AS definition`
         );
         sqlString += `\n${functionDef[0].definition};\n`;
      }

      const { rows: triggers } = await this._client.raw(
         `SELECT * FROM "information_schema"."triggers" WHERE "trigger_schema"='${this.schemaName}'`
      );

      // Group multi-event triggers (e.g. INSERT OR UPDATE) into a single definition
      const remappedTriggers = triggers.reduce((acc, trigger) => {
         const i = acc.findIndex(t => t.trigger_name === trigger.trigger_name && t.event_object_table === trigger.event_object_table);
         if (i === -1) {
            trigger.events = [trigger.event_manipulation];
            acc.push(trigger);
         }
         else
            acc[i].events.push(trigger.event_manipulation);

         return acc;
      }, []);

      for (const trigger of remappedTriggers)
         sqlString += `\nCREATE TRIGGER "${trigger.trigger_name}" ${trigger.action_timing} ${trigger.events.join(' OR ')} ON "${this.schemaName}"."${trigger.event_object_table}" FOR EACH ${trigger.action_orientation} ${trigger.action_statement};\n`;

      return sqlString;
   }

   async getFunctions () {
      let sqlString = '';
      const { rows: functions } = await this._client.raw(
         `SELECT DISTINCT routine_name AS name FROM information_schema.routines WHERE routine_type = 'FUNCTION' AND routine_schema = '${this.schemaName}' AND data_type != 'trigger'`
      );

      for (const func of functions) {
         const { rows: functionDef } = await this._client.raw(
            `SELECT pg_get_functiondef((SELECT oid FROM pg_proc WHERE proname = '${func.name}')) AS definition`
         );
         sqlString += `\n${functionDef[0].definition};\n`;
      }

      sqlString += await this.getCreateAggregates();

      return sqlString;
   }

   async getRoutines () {
      let sqlString = '';
      const { rows: functions } = await this._client.raw(
         `SELECT DISTINCT routine_name AS name FROM information_schema.routines WHERE routine_type = 'PROCEDURE' AND routine_schema = '${this.schemaName}'`
      );

      for (const func of functions) {
         const { rows: functionDef } = await this._client.raw(
            `SELECT pg_get_functiondef((SELECT oid FROM pg_proc WHERE proname = '${func.name}')) AS definition`
         );
         sqlString += `\n${functionDef[0].definition};\n`;
      }

      return sqlString;
   }

   async _queryStream (sql) {
      if (process.env.NODE_ENV === 'development') console.log('EXPORTER:', sql);
      const connection = await this._client.getConnection();
      const query = new QueryStream(sql, null);
      const stream = connection.query(query);
      const dispose = () => connection.end();

      stream.on('end', dispose);
      stream.on('error', dispose);
      stream.on('close', dispose);
      return stream;
   }

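   // The stream above runs on a dedicated connection; `dispose` closes it on
   // 'end', 'error' and 'close', so cancelled or failed exports do not leak
   // connections.
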
   getEscapedDefiner (definer) {
      // Backtick quoting here mirrors the MySQL exporter's definer format
      return definer
         .split('@')
         .map(part => '`' + part + '`')
         .join('@');
   }

   escapeAndQuote (val) {
      // eslint-disable-next-line no-control-regex
      const CHARS_TO_ESCAPE = /[\0\b\t\n\r\x1a"'\\]/g;
      const CHARS_ESCAPE_MAP = {
         '\0': '\\0',
         '\b': '\\b',
         '\t': '\\t',
         '\n': '\\n',
         '\r': '\\r',
         '\x1a': '\\Z',
         '"': '\\"',
         '\'': '\\\'',
         '\\': '\\\\'
      };
      let chunkIndex = CHARS_TO_ESCAPE.lastIndex = 0;
      let escapedVal = '';
      let match;

      while ((match = CHARS_TO_ESCAPE.exec(val))) {
         escapedVal += val.slice(chunkIndex, match.index) + CHARS_ESCAPE_MAP[match[0]];
         chunkIndex = CHARS_TO_ESCAPE.lastIndex;
      }

      if (chunkIndex === 0)
         return `'${val}'`;

      if (chunkIndex < val.length)
         return `'${escapedVal + val.slice(chunkIndex)}'`;

      return `'${escapedVal}'`;
   }

   _getGeoJSON (val) {
      if (Array.isArray(val)) {
         if (getArrayDepth(val) === 1)
            return lineString(val.reduce((acc, curr) => [...acc, [curr.x, curr.y]], []));
         else
            return polygon(val.map(arr => arr.reduce((acc, curr) => [...acc, [curr.x, curr.y]], [])));
      }
      else
         return point([val.x, val.y]);
   }
}
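The exporter's row streaming builds on pg-query-stream, which turns a query into a readable stream backed by a server-side cursor, so large tables never sit fully in memory. A minimal standalone sketch of that pattern, independent of the exporter (connection details and the table name are assumptions):

const { Client } = require('pg');
const QueryStream = require('pg-query-stream');

async function streamRows () {
   const client = new Client(); // reads PG* env vars
   await client.connect();

   // Rows are fetched in batches through a cursor instead of all at once.
   const stream = client.query(new QueryStream('SELECT * FROM "public"."books"'));

   for await (const row of stream)
      console.log(row); // one row object at a time

   await client.end();
}

streamRows().catch(console.error);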
@@ -5,7 +5,8 @@ export class SqlExporter extends BaseExporter {
   constructor (client, tables, options) {
      super(tables, options);
      this._client = client;
-      this._commentChar = '#';
+      this._commentChar = '--';
+      this._postTablesSql = '';
   }

   get schemaName () {
@@ -90,6 +91,12 @@ export class SqlExporter extends BaseExporter {
         this.writeString('\n\n');
      }

+      // SQL to execute after tables creation
+      if (this._postTablesSql) {
+         this.writeString(this._postTablesSql);
+         this.writeString('\n\n');
+      }
+
      for (const item of extraItems) {
         const processingMethod = `get${item.charAt(0).toUpperCase() + item.slice(1)}`;
         exportState.currentItemIndex++;
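This buffer is what lets PostgreSQLExporter collect each table's FOREIGN KEY constraints inside getCreateTable but write them only after every CREATE TABLE, so the dump restores cleanly regardless of table order. A reduced, runnable sketch of the pattern with hypothetical names:

// Constraints are accumulated while tables are written, flushed once at the end.
class DumpWriter {
   constructor () {
      this._postTablesSql = '';
      this.out = '';
   }

   writeTable (name, columnsSql, constraintsSql) {
      this.out += `CREATE TABLE "${name}" (${columnsSql});\n`;
      this._postTablesSql += constraintsSql; // deferred until all tables exist
   }

   finish () {
      return this.out + this._postTablesSql;
   }
}

const w = new DumpWriter();
w.writeTable('authors', 'id integer', '');
w.writeTable('books', 'id integer, author_id integer',
   'ALTER TABLE ONLY "books" ADD FOREIGN KEY ("author_id") REFERENCES "authors" ("id");\n');
console.log(w.finish());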
@@ -1,8 +1,8 @@
import fs from 'fs/promises';
-import SqlParser from '../../../../common/libs/sqlParser';
+import MySQLParser from '../../parsers/MySQLParser';
import { BaseImporter } from '../BaseImporter';

-export default class MysqlImporter extends BaseImporter {
+export default class MySQLImporter extends BaseImporter {
   constructor (client, options) {
      super(options);
      this._client = client;
@@ -11,7 +11,7 @@ export default class MysqlImporter extends BaseImporter {
   async import () {
      try {
         const { size: totalFileSize } = await fs.stat(this._options.file);
-         const parser = new SqlParser();
+         const parser = new MySQLParser();
         let readPosition = 0;
         let queryCount = 0;

@@ -22,11 +22,6 @@ export default class MysqlImporter extends BaseImporter {
            queryCount: 0
         });

-         // 1. detect file encoding
-         // 2. set fh encoding
-         // 3. detect sql mode
-         // 4. restore sql mode in case of exception
-
         return new Promise((resolve, reject) => {
            this._fileHandler.pipe(parser);

src/main/libs/importers/sql/PostgreSQLImporter.js (new file, 80 lines)
@@ -0,0 +1,80 @@
import fs from 'fs/promises';
import PostgreSQLParser from '../../parsers/PostgreSQLParser';
import { BaseImporter } from '../BaseImporter';

export default class PostgreSQLImporter extends BaseImporter {
   constructor (client, options) {
      super(options);
      this._client = client;
   }

   async import () {
      try {
         const { size: totalFileSize } = await fs.stat(this._options.file);
         const parser = new PostgreSQLParser();
         let readPosition = 0;
         let queryCount = 0;

         this.emitUpdate({
            fileSize: totalFileSize,
            readPosition: 0,
            percentage: 0,
            queryCount: 0
         });

         return new Promise((resolve, reject) => {
            this._fileHandler.pipe(parser);

            parser.on('error', reject);

            parser.on('close', async () => {
               console.log('TOTAL QUERIES', queryCount);
               console.log('import end');
               resolve();
            });

            parser.on('data', async (query) => {
               queryCount++;
               parser.pause(); // stop parsing while the statement runs

               try {
                  await this._client.query(query);
               }
               catch (error) {
                  this.emit('query-error', {
                     sql: query,
                     message: error.hint || error.toString(),
                     sqlSnippet: error.sql,
                     time: new Date().getTime()
                  });
               }

               this.emitUpdate({
                  queryCount,
                  readPosition,
                  percentage: readPosition / totalFileSize * 100
               });
               this._fileHandler.pipe(parser);
               parser.resume();
            });

            parser.on('pause', () => {
               this._fileHandler.unpipe(parser);
               this._fileHandler.readableFlowing = false;
            });

            this._fileHandler.on('data', (chunk) => {
               readPosition += chunk.length;
            });

            this._fileHandler.on('error', (err) => {
               console.log(err);
               reject(err);
            });
         });
      }
      catch (err) {
         console.log(err);
      }
   }
}
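The pause/unpipe dance above is a manual backpressure scheme: each parsed statement halts the pipeline until its query finishes, then reattaches the file stream. A reduced sketch of the same pattern with a generic Transform (all names hypothetical):

const { Transform, PassThrough } = require('stream');

// Splits input on newlines; stands in for the SQL parser.
const parser = new Transform({
   readableObjectMode: true,
   transform (chunk, enc, next) {
      for (const line of chunk.toString().split('\n'))
         if (line.trim()) this.push(line);
      next();
   }
});

const source = new PassThrough();
source.pipe(parser);

parser.on('data', async (statement) => {
   parser.pause();                // hold further 'data' events
   source.unpipe(parser);         // stop feeding the parser
   await runStatement(statement); // e.g. client.query(statement)
   source.pipe(parser);           // reattach and continue
   parser.resume();
});

async function runStatement (sql) {
   console.log('executing:', sql);
}

source.end('CREATE TABLE t (id int);\nINSERT INTO t VALUES (1);\n');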
@@ -1,6 +1,6 @@
import { Transform } from 'stream';

-export default class SqlParser extends Transform {
+export default class MySQLParser extends Transform {
   constructor (opts) {
      opts = {
         delimiter: ';',
src/main/libs/parsers/PostgreSQLParser.js (new file, 142 lines)
@@ -0,0 +1,142 @@
import { Transform } from 'stream';

export default class PostgreSQLParser extends Transform {
   constructor (opts) {
      opts = {
         delimiter: ';',
         encoding: 'utf8',
         writableObjectMode: true,
         readableObjectMode: true,
         ...opts
      };
      super(opts);
      this._buffer = '';
      this._lastChar = '';
      this._lastChars = '';
      this.encoding = opts.encoding;
      this.delimiter = opts.delimiter; // ';'
      this._bodyWrapper = '';
      this._bodyWrapperBuffer = '';

      this.isEscape = false;
      this.currentQuote = null;
      this._firstDollarFound = false;
      this._isBody = false;
      this._isSingleLineComment = false;
      this._isMultiLineComment = false;
   }

   get _isComment () {
      return this._isSingleLineComment || this._isMultiLineComment;
   }

   _transform (chunk, encoding, next) {
      for (const char of chunk.toString(this.encoding)) {
         this.checkEscape();
         this._buffer += char;
         this._lastChar = char;
         this._lastChars += char;

         if (this._lastChars.length > this._bodyWrapper.length)
            this._lastChars = this._lastChars.slice(-(this._bodyWrapper.length || 2));

         this.checkBodyWrapper(char);
         this.checkQuote(char);
         this.checkCommentRow();
         const query = this.getQuery();

         if (query)
            this.push(query);
      }
      next();
   }

   checkEscape () {
      if (this._buffer.length > 0) {
         this.isEscape = this._lastChar === '\\'
            ? !this.isEscape
            : false;
      }
   }

   checkCommentRow () {
      if (this._isBody) return;

      if (!this._isComment) {
         if (this.currentQuote === null && this._lastChars.includes('--'))
            this._isSingleLineComment = true;

         if (this.currentQuote === null && this._lastChars.includes('/*'))
            this._isMultiLineComment = true;
      }
      else {
         if (this._isSingleLineComment && (this._lastChar === '\n' || this._lastChar === '\r')) {
            this._buffer = '';
            this._isSingleLineComment = false;
         }

         if (this._isMultiLineComment && this._lastChars.includes('*/')) {
            this._buffer = '';
            this._isMultiLineComment = false;
         }
      }
   }

   checkBodyWrapper (char) {
      if (this._isBody)
         this._isBody = this._lastChars !== this._bodyWrapper;

      if (this.currentQuote === null && char === '$' && !this._firstDollarFound && !this._bodyWrapper) {
         this._firstDollarFound = true;
         this._bodyWrapperBuffer += char;
         this._isBody = true;
      }
      else if (this._firstDollarFound) {
         if (char === '\n' || char === ' ') { // not a dollar-quote tag after all
            this._firstDollarFound = false;
            this._bodyWrapperBuffer = '';
            this._bodyWrapper = '';
            this._isBody = false;
            return;
         }

         this._bodyWrapperBuffer += char;
         const isEndDollar = char === '$';

         if (isEndDollar) { // complete tag collected, e.g. $$ or $body$
            this._firstDollarFound = false;
            this._bodyWrapper = this._bodyWrapperBuffer;
            this._bodyWrapperBuffer = '';
         }
      }
   }

   checkQuote (char) {
      const isQuote = !this.isEscape && (char === '\'' || char === '"');
      if (isQuote && this.currentQuote === char)
         this.currentQuote = null;

      else if (isQuote && this.currentQuote === null)
         this.currentQuote = char;
   }

   getQuery () {
      if (this._isBody || this._isComment)
         return false;

      let query = false;
      let delimiterFound = false;

      if (this.currentQuote === null && this._buffer.length >= this.delimiter.length)
         delimiterFound = this._lastChars.slice(-this.delimiter.length) === this.delimiter;

      if (delimiterFound) {
         const parsedStr = this._buffer.trim();
         query = parsedStr;
         this._buffer = '';
         this._bodyWrapper = '';
      }

      return query;
   }
}
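The body-wrapper tracking exists because PostgreSQL function bodies are dollar-quoted and routinely contain semicolons that must not end the statement. A quick demonstration of what the parser keeps together (the function is a made-up example; the relative import path assumes running from the repo root):

import PostgreSQLParser from './src/main/libs/parsers/PostgreSQLParser';

const parser = new PostgreSQLParser();
parser.on('data', q => console.log('STATEMENT >>', q));

// The ';' inside the $$ ... $$ body must not split the statement:
// the whole CREATE FUNCTION is pushed once, at the final ';'.
parser.write(`CREATE FUNCTION add_one(i integer) RETURNS integer AS $$
BEGIN
   RETURN i + 1;
END;
$$ LANGUAGE plpgsql;
SELECT add_one(41);
`);
parser.end();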
@@ -1,6 +1,7 @@
+import fs from 'fs';
import { ClientsFactory } from '../libs/ClientsFactory';
import MysqlExporter from '../libs/exporters/sql/MysqlExporter.js';
-import fs from 'fs';
+import PostgreSQLExporter from '../libs/exporters/sql/PostgreSQLExporter';
let exporter;

process.on('message', async ({ type, client, tables, options }) => {
@@ -17,6 +18,9 @@ process.on('message', async ({ type, client, tables, options }) => {
      case 'maria':
         exporter = new MysqlExporter(connection, tables, options);
         break;
+      case 'pg':
+         exporter = new PostgreSQLExporter(connection, tables, options);
+         break;
      default:
         process.send({
            type: 'error',
@@ -1,5 +1,6 @@
import { ClientsFactory } from '../libs/ClientsFactory';
-import MysqlImporter from '../libs/importers/sql/MysqlImporter';
+import MySQLImporter from '../libs/importers/sql/MysqlImporter';
+import PostgreSQLImporter from '../libs/importers/sql/PostgreSQLImporter';
let importer;

process.on('message', async ({ type, dbConfig, options }) => {
@@ -18,7 +19,10 @@ process.on('message', async ({ type, dbConfig, options }) => {
   switch (options.type) {
      case 'mysql':
      case 'maria':
-         importer = new MysqlImporter(pool, options);
+         importer = new MySQLImporter(pool, options);
         break;
+      case 'pg':
+         importer = new PostgreSQLImporter(pool, options);
+         break;
      default:
         process.send({
@@ -190,30 +190,31 @@
               :key="key"
               class="form-checkbox"
            >
-               <input v-model="options.includes[key]" type="checkbox"><i class="form-icon" /> {{ $t(`word.${key}`) }}
+               <input v-model="options.includes[key]" type="checkbox"><i class="form-icon" /> {{ $tc(`word.${key}`, 2) }}
            </label>

-            <div class="h6 mt-4 mb-2">
-               {{ $t('message.newInserStmtEvery') }}:
-            </div>
-            <div class="columns">
-               <div class="column col-6">
-                  <input
-                     v-model.number="options.sqlInsertAfter"
-                     type="number"
-                     class="form-input"
-                     value="250"
-                  >
+            <div v-if="customizations.exportByChunks">
+               <div class="h6 mt-4 mb-2">
+                  {{ $t('message.newInserStmtEvery') }}:
+               </div>
-               <div class="column col-6">
-                  <select v-model="options.sqlInsertDivider" class="form-select">
-                     <option value="bytes">
-                        KiB
-                     </option>
-                     <option value="rows">
-                        {{ $tc('word.row', 2) }}
-                     </option>
-                  </select>
+               <div class="columns">
+                  <div class="column col-6">
+                     <input
+                        v-model.number="options.sqlInsertAfter"
+                        type="number"
+                        class="form-input"
+                        value="250"
+                     >
+                  </div>
+                  <div class="column col-6">
+                     <select v-model="options.sqlInsertDivider" class="form-select">
+                        <option value="bytes">
+                           KiB
+                        </option>
+                        <option value="rows">
+                           {{ $tc('word.row', 2) }}
+                        </option>
+                     </select>
+                  </div>
+               </div>
+            </div>

@@ -306,6 +307,9 @@ export default {
      currentWorkspace () {
         return this.getWorkspace(this.selectedWorkspace);
      },
+      customizations () {
+         return this.currentWorkspace.customizations;
+      },
      schemaItems () {
         const db = this.currentWorkspace.structure.find(db => db.name === this.selectedSchema);
         if (db)
@@ -349,7 +353,7 @@ export default {
         includeDropStatement: true
      }));

-      const structure = ['views', 'triggers', 'routines', 'functions', 'schedulers', 'triggerFunctions'];
+      const structure = ['functions', 'views', 'triggers', 'routines', 'schedulers'];

      structure.forEach(feat => {
         const val = customizations[this.currentWorkspace.client][feat];