mirror of
https://github.com/Fabio286/antares.git
synced 2025-02-10 08:40:42 +01:00
feat: export table content as SQL INSERT
This commit is contained in:
parent
d4b6d2e9d1
commit
f3b5de38c4
@ -1,14 +0,0 @@
|
|||||||
/* eslint-disable no-useless-escape */
|
|
||||||
// eslint-disable-next-line no-control-regex
|
|
||||||
const pattern = /[\0\x08\x09\x1a\n\r"'\\\%]/gm;
|
|
||||||
const regex = new RegExp(pattern);
|
|
||||||
|
|
||||||
function sqlEscaper (string: string) {
|
|
||||||
return string.replace(regex, char => {
|
|
||||||
const m = ['\\0', '\\x08', '\\x09', '\\x1a', '\\n', '\\r', '\'', '\"', '\\', '\\\\', '%'];
|
|
||||||
const r = ['\\\\0', '\\\\b', '\\\\t', '\\\\z', '\\\\n', '\\\\r', '\\\'', '\\\"', '\\\\', '\\\\\\\\', '\%'];
|
|
||||||
return r[m.indexOf(char)] || char;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export { sqlEscaper };
|
|
162
src/common/libs/sqlUtils.ts
Normal file
162
src/common/libs/sqlUtils.ts
Normal file
@ -0,0 +1,162 @@
|
|||||||
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||||
|
/* eslint-disable no-useless-escape */
|
||||||
|
import * as moment from 'moment';
|
||||||
|
import { lineString, point, polygon } from '@turf/helpers';
|
||||||
|
import customizations from '../customizations';
|
||||||
|
import { ClientCode } from '../interfaces/antares';
|
||||||
|
import { BLOB, BIT, DATE, DATETIME, FLOAT, SPATIAL, IS_MULTI_SPATIAL, NUMBER, TEXT_SEARCH } from 'common/fieldTypes';
|
||||||
|
import hexToBinary, { HexChar } from './hexToBinary';
|
||||||
|
import { getArrayDepth } from './getArrayDepth';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Escapes a string fo SQL use
|
||||||
|
*
|
||||||
|
* @param { String } string
|
||||||
|
* @returns { String } Escaped string
|
||||||
|
*/
|
||||||
|
export const sqlEscaper = (string: string): string => {
|
||||||
|
// eslint-disable-next-line no-control-regex
|
||||||
|
const pattern = /[\0\x08\x09\x1a\n\r"'\\\%]/gm;
|
||||||
|
const regex = new RegExp(pattern);
|
||||||
|
return string.replace(regex, char => {
|
||||||
|
const m = ['\\0', '\\x08', '\\x09', '\\x1a', '\\n', '\\r', '\'', '\"', '\\', '\\\\', '%'];
|
||||||
|
const r = ['\\\\0', '\\\\b', '\\\\t', '\\\\z', '\\\\n', '\\\\r', '\\\'', '\\\"', '\\\\', '\\\\\\\\', '\%'];
|
||||||
|
return r[m.indexOf(char)] || char;
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
export const objectToGeoJSON = (val: any) => {
|
||||||
|
if (Array.isArray(val)) {
|
||||||
|
if (getArrayDepth(val) === 1)
|
||||||
|
return lineString(val.reduce((acc, curr) => [...acc, [curr.x, curr.y]], []));
|
||||||
|
else
|
||||||
|
return polygon(val.map(arr => arr.reduce((acc: any, curr: any) => [...acc, [curr.x, curr.y]], [])));
|
||||||
|
}
|
||||||
|
else
|
||||||
|
return point([val.x, val.y]);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const escapeAndQuote = (val: string, client: ClientCode) => {
|
||||||
|
const { stringsWrapper: sw } = customizations[client];
|
||||||
|
// eslint-disable-next-line no-control-regex
|
||||||
|
const CHARS_TO_ESCAPE = /[\0\b\t\n\r\x1a"'\\]/g;
|
||||||
|
const CHARS_ESCAPE_MAP: {[key: string]: string} = {
|
||||||
|
'\0': '\\0',
|
||||||
|
'\b': '\\b',
|
||||||
|
'\t': '\\t',
|
||||||
|
'\n': '\\n',
|
||||||
|
'\r': '\\r',
|
||||||
|
'\x1a': '\\Z',
|
||||||
|
'"': '\\"',
|
||||||
|
'\'': '\\\'',
|
||||||
|
'\\': '\\\\'
|
||||||
|
};
|
||||||
|
let chunkIndex = CHARS_TO_ESCAPE.lastIndex = 0;
|
||||||
|
let escapedVal = '';
|
||||||
|
let match;
|
||||||
|
|
||||||
|
while ((match = CHARS_TO_ESCAPE.exec(val))) {
|
||||||
|
escapedVal += val.slice(chunkIndex, match.index) + CHARS_ESCAPE_MAP[match[0]];
|
||||||
|
chunkIndex = CHARS_TO_ESCAPE.lastIndex;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (chunkIndex === 0)
|
||||||
|
return `${sw}${val}${sw}`;
|
||||||
|
|
||||||
|
if (chunkIndex < val.length)
|
||||||
|
return `${sw}${escapedVal + val.slice(chunkIndex)}${sw}`;
|
||||||
|
|
||||||
|
return `${sw}${escapedVal}${sw}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
 * Converts a single cell value into its SQL literal representation,
 * dispatching on the column's field type.
 *
 * @param args.val raw value from the driver (string, number, Buffer, object, null, ...)
 * @param args.client client code, used for quoting style and BLOB syntax
 * @param args.field column metadata ({type, datePrecision}); an `isArray`
 *                   property, when present, marks PostgreSQL array columns
 * @returns SQL fragment for the value
 *          NOTE(review): declared `: string` but the NUMBER/FLOAT and final
 *          fallback branches assign raw numbers — confirm callers only
 *          concatenate the result.
 */
export const valueToSqlString = (args: {
      val: any;
      client: ClientCode;
      field: {type: string; datePrecision: number};
   }): string => {
   let parsedValue;
   const { val, client, field } = args;
   const { stringsWrapper: sw } = customizations[client];

   if (val === null)
      parsedValue = 'NULL';
   else if (DATE.includes(field.type)) {
      // Valid dates are normalized and quoted; invalid ones pass through raw.
      parsedValue = moment(val).isValid()
         ? escapeAndQuote(moment(val).format('YYYY-MM-DD'), client)
         : val;
   }
   else if (DATETIME.includes(field.type)) {
      // Build fractional-second format: precision 3 -> '.SSS'.
      let datePrecision = '';
      for (let i = 0; i < field.datePrecision; i++)
         datePrecision += i === 0 ? '.S' : 'S';

      parsedValue = moment(val).isValid()
         ? escapeAndQuote(moment(val).format(`YYYY-MM-DD HH:mm:ss${datePrecision}`), client)
         : escapeAndQuote(val, client);
   }
   else if ('isArray' in field) {
      // PostgreSQL array column: convert JSON-ish brackets to {} array syntax.
      // NOTE(review): `isArray` is not part of the declared field type — verify shape at call sites.
      let localVal;
      if (Array.isArray(val))
         localVal = JSON.stringify(val).replaceAll('[', '{').replaceAll(']', '}');
      else
         localVal = typeof val === 'string' ? val.replaceAll('[', '{').replaceAll(']', '}') : '';
      parsedValue = `'${localVal}'`;
   }
   else if (TEXT_SEARCH.includes(field.type))
      // tsvector-style values: escape quotes by doubling, no backslash escapes.
      parsedValue = `'${val.replaceAll('\'', '\'\'')}'`;
   else if (BIT.includes(field.type))
      parsedValue = `b'${hexToBinary(Buffer.from(val).toString('hex') as undefined as HexChar[])}'`;
   else if (BLOB.includes(field.type)) {
      // NOTE(review): parsedValue stays undefined for clients other than
      // mysql/maria/pg — confirm that is intended (e.g. sqlite BLOB export).
      if (['mysql', 'maria'].includes(client))
         parsedValue = `X'${val.toString('hex').toUpperCase()}'`;
      else if (client === 'pg')
         parsedValue = `decode('${val.toString('hex').toUpperCase()}', 'hex')`;
   }
   else if (NUMBER.includes(field.type))
      parsedValue = val;
   else if (FLOAT.includes(field.type))
      parsedValue = parseFloat(val);
   else if (SPATIAL.includes(field.type)) {
      let geoJson;
      if (IS_MULTI_SPATIAL.includes(field.type)) {
         // Multi-geometry: wrap each element's feature in a FeatureCollection.
         const features = [];
         for (const element of val)
            features.push(objectToGeoJSON(element));

         geoJson = {
            type: 'FeatureCollection',
            features
         };
      }
      else
         geoJson = objectToGeoJSON(val);

      parsedValue = `ST_GeomFromGeoJSON('${JSON.stringify(geoJson)}')`;
   }
   else if (val === '') parsedValue = `${sw}${sw}`; // empty string literal
   else {
      // Fallback: strings and objects are escaped/quoted, everything else raw.
      parsedValue = typeof val === 'string'
         ? escapeAndQuote(val, client)
         : typeof val === 'object'
            ? escapeAndQuote(JSON.stringify(val), client)
            : val;
   }

   return parsedValue;
};
|
||||||
|
|
||||||
|
export const jsonToSqlInsert = (args: {
|
||||||
|
json: { [key: string]: any};
|
||||||
|
client: ClientCode;
|
||||||
|
fields: { [key: string]: {type: string; datePrecision: number}};
|
||||||
|
table: string;
|
||||||
|
}) => {
|
||||||
|
const { client, json, fields, table } = args;
|
||||||
|
const { elementsWrapper: ew } = customizations[client];
|
||||||
|
const fieldNames = Object.keys(json).map(key => `${ew}${key}${ew}`);
|
||||||
|
const values = Object.keys(json).map(key => (
|
||||||
|
valueToSqlString({ val: json[key], client, field: fields[key] })
|
||||||
|
));
|
||||||
|
|
||||||
|
return `INSERT INTO ${ew}${table}${ew} (${fieldNames.join(', ')}) VALUES (${values.join(', ')});`;
|
||||||
|
};
|
@ -4,7 +4,7 @@ import { InsertRowsParams } from 'common/interfaces/tableApis';
|
|||||||
import { ipcMain } from 'electron';
|
import { ipcMain } from 'electron';
|
||||||
import { faker } from '@faker-js/faker';
|
import { faker } from '@faker-js/faker';
|
||||||
import * as moment from 'moment';
|
import * as moment from 'moment';
|
||||||
import { sqlEscaper } from 'common/libs/sqlEscaper';
|
import { sqlEscaper } from 'common/libs/sqlUtils';
|
||||||
import { TEXT, LONG_TEXT, ARRAY, TEXT_SEARCH, NUMBER, FLOAT, BLOB, BIT, DATE, DATETIME } from 'common/fieldTypes';
|
import { TEXT, LONG_TEXT, ARRAY, TEXT_SEARCH, NUMBER, FLOAT, BLOB, BIT, DATE, DATETIME } from 'common/fieldTypes';
|
||||||
import customizations from 'common/customizations';
|
import customizations from 'common/customizations';
|
||||||
|
|
||||||
|
@ -1,12 +1,8 @@
|
|||||||
import * as exporter from 'common/interfaces/exporter';
|
import * as exporter from 'common/interfaces/exporter';
|
||||||
import * as mysql from 'mysql2/promise';
|
import * as mysql from 'mysql2/promise';
|
||||||
import { SqlExporter } from './SqlExporter';
|
import { SqlExporter } from './SqlExporter';
|
||||||
import { BLOB, BIT, DATE, DATETIME, FLOAT, SPATIAL, IS_MULTI_SPATIAL, NUMBER } from 'common/fieldTypes';
|
|
||||||
import hexToBinary, { HexChar } from 'common/libs/hexToBinary';
|
|
||||||
import { getArrayDepth } from 'common/libs/getArrayDepth';
|
|
||||||
import * as moment from 'moment';
|
|
||||||
import { lineString, point, polygon } from '@turf/helpers';
|
|
||||||
import { MySQLClient } from '../../clients/MySQLClient';
|
import { MySQLClient } from '../../clients/MySQLClient';
|
||||||
|
import { valueToSqlString } from 'common/libs/sqlUtils';
|
||||||
|
|
||||||
export default class MysqlExporter extends SqlExporter {
|
export default class MysqlExporter extends SqlExporter {
|
||||||
protected _client: MySQLClient;
|
protected _client: MySQLClient;
|
||||||
@ -122,54 +118,7 @@ ${footer}
|
|||||||
const column = notGeneratedColumns[i];
|
const column = notGeneratedColumns[i];
|
||||||
const val = row[column.name];
|
const val = row[column.name];
|
||||||
|
|
||||||
if (val === null) sqlInsertString += 'NULL';
|
sqlInsertString += valueToSqlString({ val, client: 'mysql', field: column });
|
||||||
else if (DATE.includes(column.type)) {
|
|
||||||
sqlInsertString += moment(val).isValid()
|
|
||||||
? this.escapeAndQuote(moment(val).format('YYYY-MM-DD'))
|
|
||||||
: val;
|
|
||||||
}
|
|
||||||
else if (DATETIME.includes(column.type)) {
|
|
||||||
let datePrecision = '';
|
|
||||||
for (let i = 0; i < column.datePrecision; i++)
|
|
||||||
datePrecision += i === 0 ? '.S' : 'S';
|
|
||||||
|
|
||||||
sqlInsertString += moment(val).isValid()
|
|
||||||
? this.escapeAndQuote(moment(val).format(`YYYY-MM-DD HH:mm:ss${datePrecision}`))
|
|
||||||
: this.escapeAndQuote(val);
|
|
||||||
}
|
|
||||||
else if (BIT.includes(column.type))
|
|
||||||
sqlInsertString += `b'${hexToBinary(Buffer.from(val).toString('hex') as undefined as HexChar[])}'`;
|
|
||||||
else if (BLOB.includes(column.type))
|
|
||||||
sqlInsertString += `X'${val.toString('hex').toUpperCase()}'`;
|
|
||||||
else if (NUMBER.includes(column.type))
|
|
||||||
sqlInsertString += val;
|
|
||||||
else if (FLOAT.includes(column.type))
|
|
||||||
sqlInsertString += parseFloat(val);
|
|
||||||
else if (SPATIAL.includes(column.type)) {
|
|
||||||
let geoJson;
|
|
||||||
if (IS_MULTI_SPATIAL.includes(column.type)) {
|
|
||||||
const features = [];
|
|
||||||
for (const element of val)
|
|
||||||
features.push(this._getGeoJSON(element));
|
|
||||||
|
|
||||||
geoJson = {
|
|
||||||
type: 'FeatureCollection',
|
|
||||||
features
|
|
||||||
};
|
|
||||||
}
|
|
||||||
else
|
|
||||||
geoJson = this._getGeoJSON(val);
|
|
||||||
|
|
||||||
sqlInsertString += `ST_GeomFromGeoJSON('${JSON.stringify(geoJson)}')`;
|
|
||||||
}
|
|
||||||
else if (val === '') sqlInsertString += '\'\'';
|
|
||||||
else {
|
|
||||||
sqlInsertString += typeof val === 'string'
|
|
||||||
? this.escapeAndQuote(val)
|
|
||||||
: typeof val === 'object'
|
|
||||||
? this.escapeAndQuote(JSON.stringify(val))
|
|
||||||
: val;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (parseInt(i) !== notGeneratedColumns.length - 1)
|
if (parseInt(i) !== notGeneratedColumns.length - 1)
|
||||||
sqlInsertString += ', ';
|
sqlInsertString += ', ';
|
||||||
@ -435,17 +384,4 @@ CREATE TABLE \`${view.Name}\`(
|
|||||||
|
|
||||||
return `'${escapedVal}'`;
|
return `'${escapedVal}'`;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
||||||
_getGeoJSON (val: any) {
|
|
||||||
if (Array.isArray(val)) {
|
|
||||||
if (getArrayDepth(val) === 1)
|
|
||||||
return lineString(val.reduce((acc, curr) => [...acc, [curr.x, curr.y]], []));
|
|
||||||
else
|
|
||||||
return polygon(val.map(arr => arr.reduce((acc: any, curr: any) => [...acc, [curr.x, curr.y]], [])));
|
|
||||||
}
|
|
||||||
else
|
|
||||||
return point([val.x, val.y]);
|
|
||||||
}
|
|
||||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
|
||||||
}
|
}
|
||||||
|
@ -1,13 +1,11 @@
|
|||||||
import * as antares from 'common/interfaces/antares';
|
import * as antares from 'common/interfaces/antares';
|
||||||
import * as exporter from 'common/interfaces/exporter';
|
import * as exporter from 'common/interfaces/exporter';
|
||||||
import { SqlExporter } from './SqlExporter';
|
import { SqlExporter } from './SqlExporter';
|
||||||
import { BLOB, BIT, DATE, DATETIME, FLOAT, NUMBER, TEXT_SEARCH } from 'common/fieldTypes';
|
|
||||||
import hexToBinary, { HexChar } from 'common/libs/hexToBinary';
|
|
||||||
import * as moment from 'moment';
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
import * as QueryStream from 'pg-query-stream';
|
import * as QueryStream from 'pg-query-stream';
|
||||||
import { PostgreSQLClient } from '../../clients/PostgreSQLClient';
|
import { PostgreSQLClient } from '../../clients/PostgreSQLClient';
|
||||||
|
import { valueToSqlString } from 'common/libs/sqlUtils';
|
||||||
|
|
||||||
export default class PostgreSQLExporter extends SqlExporter {
|
export default class PostgreSQLExporter extends SqlExporter {
|
||||||
constructor (client: PostgreSQLClient, tables: exporter.TableParams[], options: exporter.ExportOptions) {
|
constructor (client: PostgreSQLClient, tables: exporter.TableParams[], options: exporter.ExportOptions) {
|
||||||
@ -223,47 +221,7 @@ SET row_security = off;\n\n\n`;
|
|||||||
const column = columns[i];
|
const column = columns[i];
|
||||||
const val = row[column.name];
|
const val = row[column.name];
|
||||||
|
|
||||||
if (val === null) sqlInsertString += 'NULL';
|
sqlInsertString += valueToSqlString({ val, client: 'pg', field: column });
|
||||||
else if (DATE.includes(column.type)) {
|
|
||||||
sqlInsertString += moment(val).isValid()
|
|
||||||
? this.escapeAndQuote(moment(val).format('YYYY-MM-DD'))
|
|
||||||
: val;
|
|
||||||
}
|
|
||||||
else if (DATETIME.includes(column.type)) {
|
|
||||||
let datePrecision = '';
|
|
||||||
for (let i = 0; i < column.datePrecision; i++)
|
|
||||||
datePrecision += i === 0 ? '.S' : 'S';
|
|
||||||
|
|
||||||
sqlInsertString += moment(val).isValid()
|
|
||||||
? this.escapeAndQuote(moment(val).format(`YYYY-MM-DD HH:mm:ss${datePrecision}`))
|
|
||||||
: this.escapeAndQuote(val);
|
|
||||||
}
|
|
||||||
else if ('isArray' in column) {
|
|
||||||
let parsedVal;
|
|
||||||
if (Array.isArray(val))
|
|
||||||
parsedVal = JSON.stringify(val).replaceAll('[', '{').replaceAll(']', '}');
|
|
||||||
else
|
|
||||||
parsedVal = typeof val === 'string' ? val.replaceAll('[', '{').replaceAll(']', '}') : '';
|
|
||||||
sqlInsertString += `'${parsedVal}'`;
|
|
||||||
}
|
|
||||||
else if (TEXT_SEARCH.includes(column.type))
|
|
||||||
sqlInsertString += `'${val.replaceAll('\'', '\'\'')}'`;
|
|
||||||
else if (BIT.includes(column.type))
|
|
||||||
sqlInsertString += `b'${hexToBinary(Buffer.from(val).toString('hex') as undefined as HexChar[])}'`;
|
|
||||||
else if (BLOB.includes(column.type))
|
|
||||||
sqlInsertString += `decode('${val.toString('hex').toUpperCase()}', 'hex')`;
|
|
||||||
else if (NUMBER.includes(column.type))
|
|
||||||
sqlInsertString += val;
|
|
||||||
else if (FLOAT.includes(column.type))
|
|
||||||
sqlInsertString += parseFloat(val);
|
|
||||||
else if (val === '') sqlInsertString += '\'\'';
|
|
||||||
else {
|
|
||||||
sqlInsertString += typeof val === 'string'
|
|
||||||
? this.escapeAndQuote(val)
|
|
||||||
: typeof val === 'object'
|
|
||||||
? this.escapeAndQuote(JSON.stringify(val))
|
|
||||||
: val;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (parseInt(i) !== columns.length - 1)
|
if (parseInt(i) !== columns.length - 1)
|
||||||
sqlInsertString += ', ';
|
sqlInsertString += ', ';
|
||||||
|
@ -136,7 +136,7 @@
|
|||||||
<script setup lang="ts">
|
<script setup lang="ts">
|
||||||
import { Component, computed, onBeforeUnmount, onMounted, onUpdated, Prop, Ref, ref } from 'vue';
|
import { Component, computed, onBeforeUnmount, onMounted, onUpdated, Prop, Ref, ref } from 'vue';
|
||||||
import { ConnectionParams } from 'common/interfaces/antares';
|
import { ConnectionParams } from 'common/interfaces/antares';
|
||||||
import { arrayToFile } from '../libs/arrayToFile';
|
import { exportRows } from '../libs/exportRows';
|
||||||
import { useNotificationsStore } from '@/stores/notifications';
|
import { useNotificationsStore } from '@/stores/notifications';
|
||||||
import { useFocusTrap } from '@/composables/useFocusTrap';
|
import { useFocusTrap } from '@/composables/useFocusTrap';
|
||||||
import Schema from '@/ipc-api/Schema';
|
import Schema from '@/ipc-api/Schema';
|
||||||
@ -312,7 +312,7 @@ const closeModal = () => emit('close');
|
|||||||
|
|
||||||
const downloadTable = (format: 'csv' | 'json') => {
|
const downloadTable = (format: 'csv' | 'json') => {
|
||||||
if (!sortedResults.value) return;
|
if (!sortedResults.value) return;
|
||||||
arrayToFile({
|
exportRows({
|
||||||
type: format,
|
type: format,
|
||||||
content: sortedResults.value,
|
content: sortedResults.value,
|
||||||
filename: 'processes'
|
filename: 'processes'
|
||||||
|
@ -112,6 +112,9 @@
|
|||||||
<li class="menu-item">
|
<li class="menu-item">
|
||||||
<a class="c-hand" @click="downloadTable('csv')">CSV</a>
|
<a class="c-hand" @click="downloadTable('csv')">CSV</a>
|
||||||
</li>
|
</li>
|
||||||
|
<li class="menu-item">
|
||||||
|
<a class="c-hand" @click="downloadTable('sql')">SQL INSERT</a>
|
||||||
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
<div class="input-group pr-2" :title="t('message.commitMode')">
|
<div class="input-group pr-2" :title="t('message.commitMode')">
|
||||||
@ -446,7 +449,7 @@ const clear = () => {
|
|||||||
clearTabData();
|
clearTabData();
|
||||||
};
|
};
|
||||||
|
|
||||||
const downloadTable = (format: 'csv' | 'json') => {
|
const downloadTable = (format: 'csv' | 'json' | 'sql') => {
|
||||||
queryTable.value.downloadTable(format, `${props.tab.type}-${props.tab.index}`);
|
queryTable.value.downloadTable(format, `${props.tab.type}-${props.tab.index}`);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -121,7 +121,7 @@ import { uidGen } from 'common/libs/uidGen';
|
|||||||
import { useSettingsStore } from '@/stores/settings';
|
import { useSettingsStore } from '@/stores/settings';
|
||||||
import { useWorkspacesStore } from '@/stores/workspaces';
|
import { useWorkspacesStore } from '@/stores/workspaces';
|
||||||
import { useConsoleStore } from '@/stores/console';
|
import { useConsoleStore } from '@/stores/console';
|
||||||
import { arrayToFile } from '../libs/arrayToFile';
|
import { exportRows } from '../libs/exportRows';
|
||||||
import { TEXT, LONG_TEXT, BLOB } from 'common/fieldTypes';
|
import { TEXT, LONG_TEXT, BLOB } from 'common/fieldTypes';
|
||||||
import BaseVirtualScroll from '@/components/BaseVirtualScroll.vue';
|
import BaseVirtualScroll from '@/components/BaseVirtualScroll.vue';
|
||||||
import WorkspaceTabQueryTableRow from '@/components/WorkspaceTabQueryTableRow.vue';
|
import WorkspaceTabQueryTableRow from '@/components/WorkspaceTabQueryTableRow.vue';
|
||||||
@ -177,6 +177,7 @@ const selectedField = ref(null);
|
|||||||
const isEditingRow = ref(false);
|
const isEditingRow = ref(false);
|
||||||
|
|
||||||
const workspaceSchema = computed(() => getWorkspace(props.connUid).breadcrumbs.schema);
|
const workspaceSchema = computed(() => getWorkspace(props.connUid).breadcrumbs.schema);
|
||||||
|
const workspaceClient = computed(() => getWorkspace(props.connUid).client);
|
||||||
|
|
||||||
const primaryField = computed(() => {
|
const primaryField = computed(() => {
|
||||||
const primaryFields = fields.value.filter(field => field.key === 'pri');
|
const primaryFields = fields.value.filter(field => field.key === 'pri');
|
||||||
@ -534,7 +535,7 @@ const selectResultset = (index: number) => {
|
|||||||
resultsetIndex.value = index;
|
resultsetIndex.value = index;
|
||||||
};
|
};
|
||||||
|
|
||||||
const downloadTable = (format: 'csv' | 'json', filename: string) => {
|
const downloadTable = (format: 'csv' | 'json' | 'sql', table: string) => {
|
||||||
if (!sortedResults.value) return;
|
if (!sortedResults.value) return;
|
||||||
|
|
||||||
const rows = JSON.parse(JSON.stringify(sortedResults.value)).map((row: any) => {
|
const rows = JSON.parse(JSON.stringify(sortedResults.value)).map((row: any) => {
|
||||||
@ -542,10 +543,14 @@ const downloadTable = (format: 'csv' | 'json', filename: string) => {
|
|||||||
return row;
|
return row;
|
||||||
});
|
});
|
||||||
|
|
||||||
arrayToFile({
|
exportRows({
|
||||||
type: format,
|
type: format,
|
||||||
content: rows,
|
content: rows,
|
||||||
filename
|
fields: fieldsObj.value as {
|
||||||
|
[key: string]: {type: string; datePrecision: number};
|
||||||
|
},
|
||||||
|
client: workspaceClient.value,
|
||||||
|
table
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -105,6 +105,9 @@
|
|||||||
<li class="menu-item">
|
<li class="menu-item">
|
||||||
<a class="c-hand" @click="downloadTable('csv')">CSV</a>
|
<a class="c-hand" @click="downloadTable('csv')">CSV</a>
|
||||||
</li>
|
</li>
|
||||||
|
<li class="menu-item">
|
||||||
|
<a class="c-hand" @click="downloadTable('sql')">SQL INSERT</a>
|
||||||
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -375,7 +378,7 @@ const setRefreshInterval = () => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const downloadTable = (format: 'csv' | 'json') => {
|
const downloadTable = (format: 'csv' | 'json' | 'sql') => {
|
||||||
queryTable.value.downloadTable(format, props.table);
|
queryTable.value.downloadTable(format, props.table);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -97,7 +97,9 @@ const removeRow = (i: number) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const doFilter = () => {
|
const doFilter = () => {
|
||||||
const clausoles = rows.value.filter(el => el.active).map(el => createClausole(el));
|
const clausoles = rows.value
|
||||||
|
.filter(el => el.active)
|
||||||
|
.map(el => createClausole(el));
|
||||||
emit('filter', clausoles);
|
emit('filter', clausoles);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1,7 +1,14 @@
|
|||||||
export const arrayToFile = (args: {
|
import { ClientCode } from 'common/interfaces/antares';
|
||||||
type: 'csv' | 'json';
|
import { jsonToSqlInsert } from 'common/libs/sqlUtils';
|
||||||
|
|
||||||
|
export const exportRows = (args: {
|
||||||
|
type: 'csv' | 'json'| 'sql';
|
||||||
content: object[];
|
content: object[];
|
||||||
filename: string;
|
table: string;
|
||||||
|
client?: ClientCode;
|
||||||
|
fields?: {
|
||||||
|
[key: string]: {type: string; datePrecision: number};
|
||||||
|
};
|
||||||
}) => {
|
}) => {
|
||||||
let mime;
|
let mime;
|
||||||
let content;
|
let content;
|
||||||
@ -20,6 +27,23 @@ export const arrayToFile = (args: {
|
|||||||
content = csv.join('\n');
|
content = csv.join('\n');
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
case 'sql': {
|
||||||
|
mime = 'text/sql';
|
||||||
|
const sql = [];
|
||||||
|
|
||||||
|
for (const row of args.content) {
|
||||||
|
sql.push(jsonToSqlInsert({
|
||||||
|
json: row,
|
||||||
|
client:
|
||||||
|
args.client,
|
||||||
|
fields: args.fields,
|
||||||
|
table: args.table
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
content = sql.join('\n');
|
||||||
|
break;
|
||||||
|
}
|
||||||
case 'json':
|
case 'json':
|
||||||
mime = 'application/json';
|
mime = 'application/json';
|
||||||
content = JSON.stringify(args.content, null, 3);
|
content = JSON.stringify(args.content, null, 3);
|
||||||
@ -30,7 +54,7 @@ export const arrayToFile = (args: {
|
|||||||
|
|
||||||
const file = new Blob([content], { type: mime });
|
const file = new Blob([content], { type: mime });
|
||||||
const downloadLink = document.createElement('a');
|
const downloadLink = document.createElement('a');
|
||||||
downloadLink.download = `${args.filename}.${args.type}`;
|
downloadLink.download = `${args.table}.${args.type}`;
|
||||||
downloadLink.href = window.URL.createObjectURL(file);
|
downloadLink.href = window.URL.createObjectURL(file);
|
||||||
downloadLink.style.display = 'none';
|
downloadLink.style.display = 'none';
|
||||||
document.body.appendChild(downloadLink);
|
document.body.appendChild(downloadLink);
|
@ -4,7 +4,7 @@
|
|||||||
"./src/main/**/*",
|
"./src/main/**/*",
|
||||||
"./src/renderer/**/*",
|
"./src/renderer/**/*",
|
||||||
"./src/common/interfaces/antares.ts"
|
"./src/common/interfaces/antares.ts"
|
||||||
],
|
, "src/common/libs/sqlUtils.ts" ],
|
||||||
"exclude": ["./src/renderer/libs/ext-language_tools.js"],
|
"exclude": ["./src/renderer/libs/ext-language_tools.js"],
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"baseUrl": "./",
|
"baseUrl": "./",
|
||||||
|
Loading…
x
Reference in New Issue
Block a user