mirror of https://github.com/Fabio286/antares.git
perf: big performance improvement in database structure loading
parent b9ed8dd610
commit a11bac504c
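What the commit does, in one line: getStructure now takes a Set of schema names and only expands the schemas in that set, so connecting no longer loads the full contents of every database up front. A minimal sketch of the new call shape, assuming an already-connected client instance (the schema name below is made up):

// On connect the app passes an empty Set, so nothing is expanded yet.
const bareStructure = await connection.getStructure(new Set());
// Later, a single schema can be expanded on demand.
const oneSchema = await connection.getStructure(new Set(['sakila']));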
@@ -46,7 +46,7 @@ export default connections => {
       try {
          await connection.connect();
 
-         const structure = await connection.getStructure();
+         const structure = await connection.getStructure(new Set());
 
          connections[conn.uid] = connection;
 
@@ -50,9 +50,9 @@ export default connections => {
       }
    });
 
-   ipcMain.handle('get-structure', async (event, uid) => {
+   ipcMain.handle('get-structure', async (event, params) => {
       try {
-         const structure = await connections[uid].getStructure();
+         const structure = await connections[params.uid].getStructure(params.schemas);
 
          return { status: 'success', response: structure };
       }
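Since the handler now receives a single params object, the renderer has to invoke it with { uid, schemas } rather than a bare uid. A hedged sketch of the raw invocation (the uid value is illustrative; in the app the call goes through the Database wrapper changed further down):

// Assumes renderer code with ipcRenderer in scope.
const { status, response } = await ipcRenderer.invoke('get-structure', {
   uid: 'C:1',                    // hypothetical connection uid
   schemas: new Set(['sakila'])   // only these schemas get fully expanded
});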
@@ -78,6 +78,8 @@ export class MySQLClient extends AntaresCore {
             case 16777215:
                name = 'MEDIUMBLOB';
                break;
+            default:
+               name = field.charsetNr === 63 ? 'BLOB' : 'TEXT';
          }
       }
 
@@ -118,10 +120,11 @@ export class MySQLClient extends AntaresCore {
    }
 
    /**
+    * @param {Array} schemas list
     * @returns {Array.<Object>} databases scructure
     * @memberof MySQLClient
     */
-   async getStructure () {
+   async getStructure (schemas) {
       const { rows: databases } = await this.raw('SHOW DATABASES');
       const { rows: functions } = await this.raw('SHOW FUNCTION STATUS');
       const { rows: procedures } = await this.raw('SHOW PROCEDURE STATUS');
@@ -131,6 +134,8 @@ export class MySQLClient extends AntaresCore {
       const triggersArr = [];
 
       for (const db of databases) {
+         if (!schemas.has(db.Database)) continue;
+
          let { rows: tables } = await this.raw(`SHOW TABLE STATUS FROM \`${db.Database}\``);
          if (tables.length) {
            tables = tables.map(table => {
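Worth noting: the gate sits inside the per-database loop, so the server-wide queries issued above it (SHOW DATABASES, SHOW FUNCTION STATUS, SHOW PROCEDURE STATUS) still run on every refresh; only the per-schema work such as SHOW TABLE STATUS is skipped. A compressed sketch of the pattern this hunk introduces:

for (const db of databases) {
   if (!schemas.has(db.Database)) continue; // skip schemas the UI has not opened yet
   // per-schema queries (SHOW TABLE STATUS ...) only run for opened schemas
}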
@@ -151,109 +156,121 @@ export class MySQLClient extends AntaresCore {
       }
 
       return databases.map(db => {
+         if (schemas.has(db.Database)) {
            // TABLES
            const remappedTables = tablesArr.filter(table => table.Db === db.Database).map(table => {
               let tableType;
               switch (table.Comment) {
                  case 'VIEW':
                     tableType = 'view';
                     break;
                  default:
                     tableType = 'table';
                     break;
               }
 
               return {
                  name: table.Name,
                  type: tableType,
                  rows: table.Rows,
                  created: table.Create_time,
                  updated: table.Update_time,
                  engine: table.Engine,
                  comment: table.Comment,
                  size: table.Data_length + table.Index_length,
                  autoIncrement: table.Auto_increment,
                  collation: table.Collation
               };
            });
 
            // PROCEDURES
            const remappedProcedures = procedures.filter(procedure => procedure.Db === db.Database).map(procedure => {
               return {
                  name: procedure.Name,
                  type: procedure.Type,
                  definer: procedure.Definer,
                  created: procedure.Created,
                  updated: procedure.Modified,
                  comment: procedure.Comment,
                  charset: procedure.character_set_client,
                  security: procedure.Security_type
               };
            });
 
            // FUNCTIONS
            const remappedFunctions = functions.filter(func => func.Db === db.Database).map(func => {
               return {
                  name: func.Name,
                  type: func.Type,
                  definer: func.Definer,
                  created: func.Created,
                  updated: func.Modified,
                  comment: func.Comment,
                  charset: func.character_set_client,
                  security: func.Security_type
               };
            });
 
            // SCHEDULERS
            const remappedSchedulers = schedulers.filter(scheduler => scheduler.Db === db.Database).map(scheduler => {
               return {
                  name: scheduler.EVENT_NAME,
                  definition: scheduler.EVENT_DEFINITION,
                  type: scheduler.EVENT_TYPE,
                  definer: scheduler.DEFINER,
                  body: scheduler.EVENT_BODY,
                  starts: scheduler.STARTS,
                  ends: scheduler.ENDS,
                  status: scheduler.STATUS,
                  executeAt: scheduler.EXECUTE_AT,
                  intervalField: scheduler.INTERVAL_FIELD,
                  intervalValue: scheduler.INTERVAL_VALUE,
                  onCompletion: scheduler.ON_COMPLETION,
                  originator: scheduler.ORIGINATOR,
                  sqlMode: scheduler.SQL_MODE,
                  created: scheduler.CREATED,
                  updated: scheduler.LAST_ALTERED,
                  lastExecuted: scheduler.LAST_EXECUTED,
                  comment: scheduler.EVENT_COMMENT,
                  charset: scheduler.CHARACTER_SET_CLIENT,
                  timezone: scheduler.TIME_ZONE
               };
            });
 
            // TRIGGERS
            const remappedTriggers = triggersArr.filter(trigger => trigger.Db === db.Database).map(trigger => {
               return {
                  name: trigger.Trigger,
                  statement: trigger.Statement,
                  timing: trigger.Timing,
                  definer: trigger.Definer,
                  event: trigger.Event,
                  table: trigger.Table,
                  sqlMode: trigger.sql_mode,
                  created: trigger.Created,
                  charset: trigger.character_set_client
               };
            });
 
            return {
               name: db.Database,
               tables: remappedTables,
               functions: remappedFunctions,
               procedures: remappedProcedures,
               triggers: remappedTriggers,
               schedulers: remappedSchedulers
            };
+         }
+         else {
+            return {
+               name: db.Database,
+               tables: [],
+               functions: [],
+               procedures: [],
+               triggers: [],
+               schedulers: []
+            };
+         }
       });
    }
 
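The net effect of the branch above: every database still gets an entry in the returned array, but only the schemas present in the Set carry real contents; the rest come back as empty placeholders. Illustrative shapes only, with field names taken from the diff:

// schemas.has(db.Database) === true
// { name: 'sakila', tables: [...], functions: [...], procedures: [...], triggers: [...], schedulers: [...] }
// schemas.has(db.Database) === false
// { name: 'mysql', tables: [], functions: [], procedures: [], triggers: [], schedulers: [] }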
@@ -366,13 +383,13 @@ export class MySQLClient extends AntaresCore {
    }
 
    /**
-    * SELECT `user`, `host`, IF(LENGTH(password)>0, password, authentication_string) AS `password` FROM `mysql`.`user`
+    * SELECT `user`, `host`, authentication_string) AS `password` FROM `mysql`.`user`
     *
     * @returns {Array.<Object>} users list
     * @memberof MySQLClient
     */
    async getUsers () {
-      const { rows } = await this.raw('SELECT `user`, `host`, IF(LENGTH(password)>0, password, authentication_string) AS `password` FROM `mysql`.`user`');
+      const { rows } = await this.raw('SELECT `user`, `host`, authentication_string AS `password` FROM `mysql`.`user`');
 
       return rows.map(row => {
          return {
@@ -3,10 +3,11 @@
       <summary
          class="accordion-header database-name"
          :class="{'text-bold': breadcrumbs.schema === database.name}"
-         @click="changeBreadcrumbs({schema: database.name, table: null})"
+         @click="selectSchema(database.name)"
          @contextmenu.prevent="showDatabaseContext($event, database.name)"
       >
-         <i class="icon mdi mdi-18px mdi-chevron-right" />
+         <div v-if="isLoading" class="icon loading" />
+         <i v-else class="icon mdi mdi-18px mdi-chevron-right" />
          <i class="database-icon mdi mdi-18px mdi-database mr-1" />
          <span>{{ database.name }}</span>
       </summary>
@@ -161,13 +162,22 @@ export default {
       database: Object,
       connection: Object
    },
+   data () {
+      return {
+         isLoading: false
+      };
+   },
    computed: {
       ...mapGetters({
+         getLoadedSchemas: 'workspaces/getLoadedSchemas',
          getWorkspace: 'workspaces/getWorkspace'
       }),
       breadcrumbs () {
         return this.getWorkspace(this.connection.uid).breadcrumbs;
      },
+      loadedSchemas () {
+         return this.getLoadedSchemas(this.connection.uid);
+      },
      maxSize () {
         return this.database.tables.reduce((acc, curr) => {
            if (curr.size > acc) acc = curr.size;
@@ -180,9 +190,19 @@ export default {
    },
    methods: {
       ...mapActions({
-         changeBreadcrumbs: 'workspaces/changeBreadcrumbs'
+         changeBreadcrumbs: 'workspaces/changeBreadcrumbs',
+         refreshSchema: 'workspaces/refreshSchema'
       }),
       formatBytes,
+      async selectSchema (schema) {
+         if (!this.loadedSchemas.has(schema)) {
+            this.isLoading = true;
+            await this.refreshSchema({ uid: this.connection.uid, schema });
+            this.isLoading = false;
+         }
+
+         this.changeBreadcrumbs({ schema, table: null });
+      },
       showDatabaseContext (event, database) {
          this.changeBreadcrumbs({ schema: database, table: null });
          this.$emit('show-database-context', { event, database });
@@ -230,6 +250,16 @@ export default {
    .misc-icon {
       opacity: 0.7;
    }
+
+   .loading {
+      height: 18px;
+      width: 18px;
+
+      &::after {
+         height: 0.6rem;
+         width: 0.6rem;
+      }
+   }
 }
 
 .misc-name {
@@ -18,8 +18,8 @@ export default class {
       return ipcRenderer.invoke('delete-database', params);
    }
 
-   static getStructure (uid) {
-      return ipcRenderer.invoke('get-structure', uid);
+   static getStructure (params) {
+      return ipcRenderer.invoke('get-structure', params);
    }
 
    static getCollations (uid) {
@@ -40,6 +40,9 @@ export default {
          .filter(workspace => workspace.connected)
          .map(workspace => workspace.uid);
    },
+   getLoadedSchemas: state => uid => {
+      return state.workspaces.find(workspace => workspace.uid === uid).loaded_schemas;
+   },
    isUnsavedDiscardModal: state => {
       return state.is_unsaved_discard_modal;
    }
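The getter hands back the Set itself, so callers can test membership directly; the database component earlier in the diff does exactly that through mapGetters. A small usage sketch (connection uid and schema name are placeholders):

// Assumes a component that maps getLoadedSchemas: 'workspaces/getLoadedSchemas'.
const alreadyLoaded = this.getLoadedSchemas(this.connection.uid).has('sakila');
if (!alreadyLoaded) {
   // fetch it once; the Set then remembers it for this workspace
}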
@@ -65,6 +68,8 @@ export default {
          ? {
               ...workspace,
               structure: {},
+              breadcrumbs: {},
+              loaded_schemas: new Set(),
               connected: false
            }
          : workspace);
@@ -77,6 +82,19 @@ export default {
            }
          : workspace);
    },
+   REFRESH_SCHEMA (state, { uid, schema, schemaElements }) {
+      state.workspaces = state.workspaces.map(workspace => {
+         if (workspace.uid === uid) {
+            const schemaIndex = workspace.structure.findIndex(s => s.name === schema);
+
+            if (schemaIndex !== -1)
+               workspace.structure[schemaIndex] = schemaElements;
+            else
+               workspace.structure.push(schemaElements);
+         }
+         return workspace;
+      });
+   },
    REFRESH_COLLATIONS (state, { uid, collations }) {
       state.workspaces = state.workspaces.map(workspace => workspace.uid === uid
          ? {
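REFRESH_SCHEMA is effectively an upsert on workspace.structure: it replaces the entry whose name matches the schema, or pushes a new one if that schema was never loaded. A hedged sketch of the commit it expects, with the payload shape taken from the refreshSchema action further down (values are illustrative):

commit('REFRESH_SCHEMA', {
   uid: 'C:1',        // hypothetical workspace uid
   schema: 'sakila',  // hypothetical schema name
   schemaElements: { name: 'sakila', tables: [/* ... */], functions: [], procedures: [], triggers: [], schedulers: [] }
});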
@@ -198,6 +216,13 @@ export default {
       },
       SET_PENDING_BREADCRUMBS (state, payload) {
          state.pending_breadcrumbs = payload;
       },
+      ADD_LOADED_SCHEMA (state, payload) {
+         state.workspaces = state.workspaces.map(workspace => {
+            if (workspace.uid === payload.uid)
+               workspace.loaded_schemas.add(payload.schema);
+            return workspace;
+         });
+      }
    },
    actions: {
@@ -237,9 +262,10 @@ export default {
          dispatch('notifications/addNotification', { status: 'error', message: err.stack }, { root: true });
       }
    },
-   async refreshStructure ({ dispatch, commit }, uid) {
+   async refreshStructure ({ dispatch, commit, getters }, uid) {
       try {
-         const { status, response } = await Database.getStructure(uid);
+         const { status, response } = await Database.getStructure({ uid, schemas: getters.getLoadedSchemas(uid) });
 
          if (status === 'error')
            dispatch('notifications/addNotification', { status, message: response }, { root: true });
         else
@@ -249,6 +275,18 @@ export default {
          dispatch('notifications/addNotification', { status: 'error', message: err.stack }, { root: true });
       }
    },
+   async refreshSchema ({ dispatch, commit }, { uid, schema }) {
+      try {
+         const { status, response } = await Database.getStructure({ uid, schemas: new Set([schema]) });
+         if (status === 'error')
+            dispatch('notifications/addNotification', { status, message: response }, { root: true });
+         else
+            commit('REFRESH_SCHEMA', { uid, schema, schemaElements: response.find(_schema => _schema.name === schema) });
+      }
+      catch (err) {
+         dispatch('notifications/addNotification', { status: 'error', message: err.stack }, { root: true });
+      }
+   },
    async refreshCollations ({ dispatch, commit }, uid) {
       try {
         const { status, response } = await Database.getCollations(uid);
@@ -312,7 +350,8 @@ export default {
             variables: [],
             collations: [],
             users: [],
-            breadcrumbs: {}
+            breadcrumbs: {},
+            loaded_schemas: new Set()
          };
 
          commit('ADD_WORKSPACE', workspace);
@@ -349,6 +388,9 @@ export default {
 
          commit('CHANGE_BREADCRUMBS', { uid: getters.getSelected, breadcrumbs: { ...breadcrumbsObj, ...payload } });
          lastBreadcrumbs = { ...breadcrumbsObj, ...payload };
+
+         if (payload.schema)
+            commit('ADD_LOADED_SCHEMA', { uid: getters.getSelected, schema: payload.schema });
       },
       newTab ({ commit }, uid) {
          const tab = uidGen('T');
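Read end to end, the new flow is: connecting fetches the structure with an empty Set (database names only); clicking a database runs selectSchema, which dispatches refreshSchema for that one schema; refreshSchema asks the main process for new Set([schema]) and commits REFRESH_SCHEMA to swap that schema's entry in place; changeBreadcrumbs then records the schema in loaded_schemas via ADD_LOADED_SCHEMA, so later refreshStructure calls keep re-fetching only the schemas the user has actually opened. A condensed, illustrative sketch of that loop from the store's point of view (not the literal implementation; 'sakila' and uid are placeholders):

// 1. connect: structure contains only database names
// 2. user clicks 'sakila' in the sidebar
await dispatch('refreshSchema', { uid, schema: 'sakila' });   // fills in tables, triggers, ...
// 3. the breadcrumbs change marks it as loaded
commit('ADD_LOADED_SCHEMA', { uid, schema: 'sakila' });
// 4. any later full refresh only expands the loaded set
await dispatch('refreshStructure', uid);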