diff --git a/README.md b/README.md
index a0b42756..21d9d454 100644
--- a/README.md
+++ b/README.md
@@ -109,7 +109,7 @@ Or, if you have moved config folder out from Nmig's directory:
Note: "logs_directory" will be created during script execution.
VERSION
-Current version is 6.0.0
+Current version is 6.1.0
LICENSE
NMIG is available under "GNU GENERAL PUBLIC LICENSE" (v. 3)
diff --git a/package.json b/package.json
index 2a103a71..fd79aa79 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "nmig",
- "version": "6.0.0",
+ "version": "6.1.0",
"description": "The database migration app",
"author": "Anatoly Khaytovich",
"license": "GPL-3.0",
diff --git a/src/DataChunksProcessor.ts b/src/DataChunksProcessor.ts
index 0393a999..42c1e295 100644
--- a/src/DataChunksProcessor.ts
+++ b/src/DataChunksProcessor.ts
@@ -44,8 +44,9 @@ export default async (
true,
);
const logTitle = 'DataChunksProcessor::default';
+ const arrTableColumns = (conversion._dicTables.get(tableName) as Table).arrTableColumns;
const selectFieldList: string = arrangeColumnsData(
- (conversion._dicTables.get(tableName) as Table).arrTableColumns,
+ arrTableColumns,
+ conversion._mysqlVersion.split('.').slice(0, 2).join('.'),
conversion._encoding,
);
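For context, the new second argument trims the full MySQL server version down to its major.minor prefix before handing it to `arrangeColumnsData`. A minimal sketch of that transformation outside the diff (`toMajorMinor` is an illustrative name, not part of the patch):

```typescript
// Illustrative helper (not nmig's code): reduce a full MySQL version
// string to its "major.minor" prefix, as the added argument above does.
const toMajorMinor = (mysqlVersion: string): string =>
    mysqlVersion.split('.').slice(0, 2).join('.');

console.log(toMajorMinor('8.0.36'));     // "8.0"
console.log(toMajorMinor('5.7.44-log')); // "5.7" -- any suffix on the patch part is dropped
```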
@@ -72,8 +73,11 @@ export default async (
const metadata: string = JSON.stringify({
_tableName: tableName,
- _selectFieldList: selectFieldList,
_rowsCnt: rowsCnt,
+ _selectFieldList: selectFieldList,
+ _copyColumnNamesList: arrTableColumns
+ .map((column: any): string => `"${column.Field}"`)
+ .join(','),
});
params.sql = `INSERT INTO ${getDataPoolTableName(conversion)}("metadata") VALUES ($1);`;
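The metadata row persisted into the data-pool table now also carries `_copyColumnNamesList`, a comma-separated list of double-quoted column identifiers. A sketch of the shape it produces, assuming column objects that mirror MySQL's `SHOW COLUMNS` rows (where `Field` holds the column name; the helper and interface names are illustrative):

```typescript
// Sketch (assumed names, not nmig's code): building a quoted,
// comma-separated column list like the _copyColumnNamesList stored above.
interface MysqlColumn {
    Field: string; // column name, as reported by MySQL's SHOW COLUMNS
}

const buildCopyColumnNamesList = (columns: MysqlColumn[]): string =>
    columns.map((column: MysqlColumn): string => `"${column.Field}"`).join(',');

console.log(buildCopyColumnNamesList([{ Field: 'id' }, { Field: 'firstName' }]));
// => "id","firstName"
```

Double-quoting each identifier keeps mixed-case MySQL column names intact on the PostgreSQL side, where unquoted identifiers are folded to lower case.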
diff --git a/src/DataPipeManager.ts b/src/DataPipeManager.ts
index 67995942..5de53c43 100644
--- a/src/DataPipeManager.ts
+++ b/src/DataPipeManager.ts
@@ -187,7 +187,7 @@ export default class DataPipeManager {
DataPipeManager.getNumberOfReaderProcesses(conversion);
// !!!Note, invoke the "DataPipeManager.runDataReaderProcess" method sequentially.
- // DO NOT use ".map(async _ => await DataPipeManager.runDataReaderProcess(..." to avoid race condition.
+ // DO NOT use ".map(async _ => await DataPipeManager.runDataReaderProcess(...))" to avoid race condition.
for (let i = 0; i < numberOfReaderProcesses; ++i) {
await DataPipeManager.runDataReaderProcess(conversion);
}
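To illustrate why the loop awaits each call instead of mapping to promises, here is a self-contained contrast (all names are hypothetical stand-ins for the patched code):

```typescript
// Hypothetical stand-in for forking one reader process and waiting
// until it has fully initialized.
const launchReader = async (id: number): Promise<void> => {
    await new Promise<void>(resolve => setTimeout(resolve, 10));
    console.log(`reader ${id} initialized`);
};

const main = async (): Promise<void> => {
    const numberOfReaderProcesses = 3;

    // Sequential, as in the patch: reader i + 1 starts only after reader i
    // is up, so initialization steps never overlap.
    for (let i = 0; i < numberOfReaderProcesses; ++i) {
        await launchReader(i);
    }

    // The forbidden shape: ".map(async _ => await launchReader(_))" fires all
    // launches at once; awaiting Promise.all only waits for completion,
    // it does not serialize the launches.
    // await Promise.all([0, 1, 2].map(async i => await launchReader(i)));
};

main();
```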
diff --git a/src/DataReader.ts b/src/DataReader.ts
index 73008de4..3037e948 100644
--- a/src/DataReader.ts
+++ b/src/DataReader.ts
@@ -74,14 +74,14 @@ process.on('message', async (signal: MessageToDataReader): Promise<void> => {
const populateTable = async (conv: Conversion, chunk: any): Promise<void> => {
const tableName: string = chunk._tableName;
const strSelectFieldList: string = chunk._selectFieldList;
+ const copyColumnNamesList: string = chunk._copyColumnNamesList;
const rowsCnt: number = chunk._rowsCnt;
const dataPoolId: number = chunk._id;
const originalTableName: string = extraConfigProcessor.getTableName(conv, tableName, true);
const sql = `SELECT ${strSelectFieldList} FROM \`${originalTableName}\`;`;
const mysqlClient: PoolConnection = await DBAccess.getMysqlClient(conv);
- const sqlCopy = `COPY "${conv._schema}"."${tableName}" FROM STDIN
- WITH(FORMAT csv, DELIMITER '${conv._delimiter}',
- ENCODING '${conv._targetConString.charset}');`;
+ const sqlCopy = `COPY "${conv._schema}"."${tableName}" (${copyColumnNamesList}) FROM STDIN
+ WITH (FORMAT csv, DELIMITER '${conv._delimiter}', ENCODING '${conv._targetConString.charset}');`;
const client: PoolClient = await DBAccess.getPgClient(conv);
let originalSessionReplicationRole: string | null = null;
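With the column list inlined, the COPY target no longer depends on the table's physical column order, and any column left unnamed falls back to its default. A minimal sketch of consuming such a statement with `pg` and `pg-copy-streams` (the helper name and parameters are assumptions, not nmig's API):

```typescript
// Sketch (assumed names): streaming CSV into a column-qualified COPY.
import { Pool, PoolClient } from 'pg';
import { from as copyFrom } from 'pg-copy-streams';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises';

const copyCsvIntoTable = async (
    pool: Pool,
    schema: string,
    table: string,
    columns: string[],
    csv: string,
): Promise<void> => {
    const client: PoolClient = await pool.connect();

    try {
        // Quote each identifier and name the target columns explicitly,
        // mirroring the sqlCopy statement built above.
        const columnList: string = columns.map((c: string): string => `"${c}"`).join(',');
        const sql = `COPY "${schema}"."${table}" (${columnList}) FROM STDIN WITH (FORMAT csv, DELIMITER ',');`;

        // client.query(copyFrom(...)) returns a writable stream; pipe the CSV in.
        await pipeline(Readable.from([csv]), client.query(copyFrom(sql)));
    } finally {
        client.release();
    }
};
```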