perf: split the export select query to avoid running out of memory

This commit is contained in:
Fabio Di Stasio 2021-11-01 16:12:20 +01:00
parent d9d3bf2bc9
commit 409ed54608
1 changed file with 49 additions and 45 deletions

View File

@ -52,6 +52,7 @@ ${footer}
async getTableInsert (tableName) {
let rowCount = 0;
let sqlStr = '';
const pageSize = 1000;
const countResults = await this._client.raw(
`SELECT COUNT(1) as count FROM \`${this.schemaName}\`.\`${tableName}\``
@ -59,6 +60,7 @@ ${footer}
if (countResults.rows.length === 1) rowCount = countResults.rows[0].count;
if (rowCount > 0) {
const totalPages = Math.ceil(rowCount / pageSize);
let queryLength = 0;
let rowsWritten = 0;
const { sqlInsertDivider, sqlInsertAfter } = this._options;
@ -71,16 +73,18 @@ ${footer}
', '
)}) VALUES`;
const tableResult = await this._client.raw(
`SELECT ${columnNames.join(', ')} FROM \`${
this.schemaName
}\`.\`${tableName}\``
);
sqlStr += `LOCK TABLES \`${tableName}\` WRITE;\n`;
sqlStr += `/*!40000 ALTER TABLE \`${tableName}\` DISABLE KEYS */;`;
sqlStr += '\n\n';
for (let pageNumber = 0; pageNumber < totalPages; pageNumber++) {
const tableResult = await this._client.raw(
`SELECT ${columnNames.join(', ')} FROM \`${
this.schemaName
}\`.\`${tableName}\`
LIMIT ${pageSize} OFFSET ${pageSize * pageNumber}`
);
sqlStr += insertStmt;
for (const rowIndex in tableResult.rows) {
@ -127,8 +131,8 @@ ${footer}
queryLength += sqlInsertString.length;
rowsWritten++;
}
sqlStr += ';\n\n';
}
sqlStr += `/*!40000 ALTER TABLE \`${tableName}\` ENABLE KEYS */;\n`;
sqlStr += 'UNLOCK TABLES;';