Commit 91612ec
resolved merge conflicts
AnatolyUss committed Oct 28, 2020
2 parents c8cec71 + 9258a5e commit 91612ec
Showing 11 changed files with 333 additions and 222 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -109,7 +109,7 @@ Or, if you have moved <code>config</code> folder out from Nmig's directory:<br />
<br /><b>Note:</b> "logs_directory" will be created during script execution.</p>

<h3>VERSION</h3>
-<p>Current version is 5.4.0<br />
+<p>Current version is 5.5.0<br />

<h3>LICENSE</h3>
<p>NMIG is available under "GNU GENERAL PUBLIC LICENSE" (v. 3) <br />
16 changes: 16 additions & 0 deletions config/index_types_map.json
@@ -0,0 +1,16 @@
+{
+    "README" : [
+        "This JSON document represents a correct index-types map between MySQL and PostgreSQL.",
+        "If you wish to customize (not recommended) this map - you can.",
+        "Map explanation:",
+        "Each key represents a MySQL index-type, and each value represents corresponding PostgreSQL index-type."
+    ],
+
+    "BTREE": "BTREE",
+
+    "HASH": "HASH",
+
+    "SPATIAL": "GIST",
+
+    "FULLTEXT": "GIN"
+}
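
For context, a minimal sketch (not part of this commit) of how this map is consumed. It mirrors the getIndexType helper added to src/IndexAndKeyProcessor.ts below; the inlined map object and the name toPgIndexType are illustrative only:

    // Parsed shape of config/index_types_map.json (sans the "README" key).
    const indexTypesMap: Record<string, string> = {
        BTREE: 'BTREE',
        HASH: 'HASH',
        SPATIAL: 'GIST',
        FULLTEXT: 'GIN'
    };

    // MySQL index types missing from the map fall back to PostgreSQL's default, BTREE.
    const toPgIndexType = (mySqlIndexType: string): string =>
        mySqlIndexType in indexTypesMap ? indexTypesMap[mySqlIndexType] : 'BTREE';

    toPgIndexType('FULLTEXT'); // 'GIN'
    toPgIndexType('RTREE');    // 'BTREE' (unmapped)
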
323 changes: 218 additions & 105 deletions package-lock.json

Large diffs are not rendered by default.

14 changes: 7 additions & 7 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "nmig",
"version": "5.4.0",
"version": "5.5.0",
"description": "The database migration app",
"author": "Anatoly Khaytovich<[email protected]>",
"license": "GPL-3.0",
@@ -12,18 +12,18 @@
"node": ">=10.0.0"
},
"dependencies": {
"json2csv": "^5.0.1",
"json2csv": "^5.0.3",
"mysql": "^2.18.1",
"pg": "^8.3.3",
"pg-copy-streams": "^2.2.2",
"pg": "^8.4.2",
"pg-copy-streams": "^5.1.1",
"@types/mysql": "^2.15.15",
"@types/node": "^14.10.1",
"@types/pg": "^7.14.4"
"@types/node": "^14.14.5",
"@types/pg": "^7.14.5"
},
"devDependencies": {
"@types/tape": "^4.13.0",
"tape": "^5.0.1",
"typescript": "^4.0.2"
"typescript": "^4.0.5"
},
"scripts": {
"build": "tsc",
76 changes: 26 additions & 50 deletions src/BootProcessor.ts
@@ -26,6 +26,7 @@ import DBVendors from './DBVendors';
import IDBAccessQueryParams from './IDBAccessQueryParams';
import IConfAndLogsPaths from './IConfAndLogsPaths';
import { getStateLogsTableName } from './MigrationStateManager';
+import { generateError, log } from './FsOps';

/**
* Checks correctness of connection details of both MySQL and PostgreSQL.
@@ -67,61 +67,36 @@ export const getLogo = (): string => {
/**
* Boots the migration.
*/
-export const boot = (conversion: Conversion): Promise<Conversion> => {
-    return new Promise<Conversion>(async resolve => {
-        const connectionErrorMessage = await checkConnection(conversion);
-        const logo: string = getLogo();
+export const boot = async (conversion: Conversion): Promise<Conversion> => {
+    const connectionErrorMessage = await checkConnection(conversion);
+    const logo: string = getLogo();
+    const logTitle: string = 'BootProcessor::boot';

-        if (connectionErrorMessage) {
-            console.log(`${ logo } \n ${ connectionErrorMessage }`);
-            process.exit(1);
-        }
+    if (connectionErrorMessage) {
+        await generateError(conversion, `\t--[${ logTitle }]\n ${ logo } \n ${ connectionErrorMessage }`);
+        process.exit(1);
+    }

-        const sql: string = `SELECT EXISTS(SELECT 1 FROM information_schema.tables`
-            + ` WHERE table_schema = '${ conversion._schema }' AND table_name = '${ getStateLogsTableName(conversion, true) }');`;
+    const sql: string = `SELECT EXISTS(SELECT 1 FROM information_schema.tables`
+        + ` WHERE table_schema = '${ conversion._schema }' AND table_name = '${ getStateLogsTableName(conversion, true) }');`;

-        const params: IDBAccessQueryParams = {
-            conversion: conversion,
-            caller: 'BootProcessor::boot',
-            sql: sql,
-            vendor: DBVendors.PG,
-            processExitOnError: true,
-            shouldReturnClient: false
-        };
-
-        const result: DBAccessQueryResult = await DBAccess.query(params);
-        const isExists: boolean = !!result.data.rows[0].exists;
-        const message: string = `${ (isExists
-            ? '\n\t--[boot] NMIG is ready to restart after some failure.\n\t--[boot] Consider checking log files at the end of migration.'
-            : '\n\t--[boot] NMIG is ready to start.') } \n\t--[boot] Proceed? [Y/n]`;
-
-        console.log(logo + message);
-
-        const _getUserInput = (input: string): void => {
-            const trimedInput: string = input.trim();
-
-            if (trimedInput === 'n' || trimedInput === 'N') {
-                console.log('\t--[boot] Migration aborted.\n');
-                process.exit(0);
-            }
-
-            if (trimedInput === 'y' || trimedInput === 'Y') {
-                process.stdin.removeListener('data', _getUserInput);
-                conversion._timeBegin = new Date();
-                return resolve(conversion);
-            }
-
-            const hint: string = `\t--[boot] Unexpected input ${ trimedInput }\n`
-                + `\t--[boot] Expected input is upper case Y\n\t--[boot] or lower case n\n${message}`;
-
-            console.log(hint);
-        };
-
-        process.stdin
-            .resume()
-            .setEncoding(conversion._encoding)
-            .on('data', _getUserInput);
-    });
+    const params: IDBAccessQueryParams = {
+        conversion: conversion,
+        caller: 'BootProcessor::boot',
+        sql: sql,
+        vendor: DBVendors.PG,
+        processExitOnError: true,
+        shouldReturnClient: false
+    };
+
+    const result: DBAccessQueryResult = await DBAccess.query(params);
+    const isExists: boolean = !!result.data.rows[0].exists;
+    const message: string = `${ (isExists
+        ? '\n\t--[boot] NMIG is restarting after some failure.\n\t--[boot] Consider checking log files at the end of migration.\n'
+        : '\n\t--[boot] NMIG is starting.') } \n`;
+
+    log(conversion, `\t--[${ logTitle }] ${ logo }${ message }`);
+    return conversion;
};

/**
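
For context, a minimal sketch (not part of this commit) of the caller-side effect of the rewrite above: boot no longer blocks on a Y/n stdin prompt, so inside an async context it can simply be awaited.

    // boot() resolves once the connection check and the state-logs lookup complete.
    const bootedConversion: Conversion = await boot(conversion);
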
15 changes: 13 additions & 2 deletions src/Conversion.ts
@@ -85,6 +85,11 @@ export default class Conversion {
*/
public readonly _dataTypesMapAddr: string;

+    /**
+     * A path to the index types map.
+     */
+    public readonly _indexTypesMapAddr: string;

/**
* A path to the "errors-only.log" file.
*/
@@ -108,7 +113,7 @@
/**
* The timestamp, at which the migration began.
*/
-    public _timeBegin: Date | null;
+    public _timeBegin: Date;

/**
* Current version of source (MySQL) db.
@@ -185,6 +190,11 @@
*/
public _dataTypesMap: any;

+    /**
+     * The index types map.
+     */
+    public _indexTypesMap: any;

/**
* Buffer level when stream.write() starts returning false.
* This number is a number of JavaScript objects.
@@ -200,12 +210,13 @@
this._targetConString = this._config.target;
this._logsDirPath = this._config.logsDirPath;
this._dataTypesMapAddr = this._config.dataTypesMapAddr;
+        this._indexTypesMapAddr = this._config.indexTypesMapAddr;
this._allLogsPath = path.join(this._logsDirPath, 'all.log');
this._errorLogsPath = path.join(this._logsDirPath, 'errors-only.log');
this._notCreatedViewsPath = path.join(this._logsDirPath, 'not_created_views');
this._excludeTables = this._config.exclude_tables === undefined ? [] : this._config.exclude_tables;
this._includeTables = this._config.include_tables === undefined ? [] : this._config.include_tables;
-        this._timeBegin = null;
+        this._timeBegin = new Date();
this._encoding = this._config.encoding === undefined ? 'utf8' : this._config.encoding;
this._0777 = '0777';
this._mysqlVersion = '5.6.21'; // Simply a default value.
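
For context, a minimal sketch (not part of this commit) of what the tightened _timeBegin type buys callers; the elapsed-time logging is illustrative:

    // _timeBegin is now always a Date, so no "if (conversion._timeBegin !== null)" guard
    // is needed before doing arithmetic on it.
    const elapsedMs: number = new Date().getTime() - conversion._timeBegin.getTime();
    console.log(`Migration has been running for ${ elapsedMs } ms.`);
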
4 changes: 2 additions & 2 deletions src/DataLoader.ts
@@ -165,10 +165,10 @@ const getCopyStream = (
const copyStream: any = client.query(from(sqlCopy));

copyStream
-        .on('end', async () => {
+        .on('finish', async () => {
// COPY FROM STDIN does not return the number of rows inserted.
// But the transactional behavior still applies, meaning no records inserted if at least one failed.
-            // That is why in case of 'on end' the rowsCnt value is actually the number of records inserted.
+            // That is why in case of 'on finish' the rowsCnt value is actually the number of records inserted.
processSend(new MessageToMaster(tableName, rowsCnt));
await deleteChunk(conv, dataPoolId, client);
})
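
For context, a minimal sketch (not part of this commit) of the event this hunk reacts to. pg-copy-streams v5 (bumped in package.json above) models COPY FROM STDIN as a standard writable stream, so completion is signalled by 'finish' rather than v2's 'end'. The client, table, and rows below are illustrative only:

    import { Client } from 'pg';
    import { from as copyFrom } from 'pg-copy-streams';

    // Hypothetical loader: streams newline-terminated rows into a demo table.
    const loadRows = async (client: Client, rows: string[]): Promise<void> => {
        const copyStream = client.query(copyFrom('COPY demo_table FROM STDIN'));

        await new Promise<void>((resolve, reject) => {
            copyStream.on('finish', resolve); // v5 emits 'finish'; v2 emitted 'end' here.
            copyStream.on('error', reject);
            rows.forEach((row: string) => copyStream.write(`${ row }\n`));
            copyStream.end();
        });
    };
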
81 changes: 35 additions & 46 deletions src/FsOps.ts
@@ -84,48 +84,57 @@ export const log = (conversion: Conversion, log: string | NodeJS.ErrnoException,
};

/**
- * Reads the configuration file.
+ * Reads and parses JOSN file under given path.
 */
-export const readConfig = (confPath: string, logsPath: string, configFileName: string = 'config.json'): Promise<any> => {
+const readAndParseJsonFile = (pathToFile: string): Promise<any> => {
    return new Promise<any>(resolve => {
-        const pathToConfig = path.join(confPath, configFileName);
-
-        fs.readFile(pathToConfig, (error: ErrnoException | null, data: Buffer) => {
+        fs.readFile(pathToFile, (error: ErrnoException | null, data: Buffer) => {
            if (error) {
-                console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ pathToConfig }`);
+                console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ pathToFile }`);
                process.exit(1);
            }

            const config: any = JSON.parse(data.toString());
-            config.logsDirPath = path.join(logsPath, 'logs_directory');
-            config.dataTypesMapAddr = path.join(confPath, 'data_types_map.json');
            resolve(config);
        });
    });
};

/**
- * Reads the extra configuration file, if necessary.
+ * Reads the configuration file.
 */
-export const readExtraConfig = (config: any, confPath: string, extraConfigFileName: string = 'extra_config.json'): Promise<any> => {
-    return new Promise<any>(resolve => {
-        if (config.enable_extra_config !== true) {
-            config.extraConfig = null;
-            return resolve(config);
-        }
-
-        const pathToExtraConfig = path.join(confPath, extraConfigFileName);
-
-        fs.readFile(pathToExtraConfig, (error: ErrnoException | null, data: Buffer) => {
-            if (error) {
-                console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ pathToExtraConfig }`);
-                process.exit(1);
-            }
-
-            config.extraConfig = JSON.parse(data.toString());
-            resolve(config);
-        });
-    });
+export const readConfig = async (confPath: string, logsPath: string, configFileName: string = 'config.json'): Promise<any> => {
+    const pathToConfig = path.join(confPath, configFileName);
+    const config: any = await readAndParseJsonFile(pathToConfig);
+    config.logsDirPath = path.join(logsPath, 'logs_directory');
+    config.dataTypesMapAddr = path.join(confPath, 'data_types_map.json');
+    config.indexTypesMapAddr = path.join(confPath, 'index_types_map.json');
+    return config;
+};
+
+/**
+ * Reads the extra configuration file, if necessary.
+ */
+export const readExtraConfig = async (config: any, confPath: string, extraConfigFileName: string = 'extra_config.json'): Promise<any> => {
+    if (config.enable_extra_config !== true) {
+        config.extraConfig = null;
+        return config;
+    }
+
+    const pathToExtraConfig = path.join(confPath, extraConfigFileName);
+    config.extraConfig = await readAndParseJsonFile(pathToExtraConfig);
+    return config;
+};
+
+/**
+ * Reads both "./config/data_types_map.json" and "./config/index_types_map.json" and converts its json content to js object.
+ */
+export const readDataAndIndexTypesMap = async (conversion: Conversion): Promise<Conversion> => {
+    const logTitle: string = 'FsOps::readDataAndIndexTypesMap';
+    conversion._dataTypesMap = await readAndParseJsonFile(conversion._dataTypesMapAddr);
+    conversion._indexTypesMap = await readAndParseJsonFile(conversion._indexTypesMapAddr);
+    log(conversion, `\t--[${ logTitle }] Data and Index Types Maps are loaded...`);
+    return conversion;
+};

/**
@@ -166,23 +175,3 @@ const createDirectory = (conversion: Conversion, directoryPath: string, logTitle
        });
    });
};
-
-/**
- * Reads "./config/data_types_map.json" and converts its json content to js object.
- */
-export const readDataTypesMap = (conversion: Conversion): Promise<Conversion> => {
-    return new Promise<Conversion>(resolve => {
-        fs.readFile(conversion._dataTypesMapAddr, (error: ErrnoException | null, data: Buffer) => {
-            const logTitle: string = 'FsOps::readDataTypesMap';
-
-            if (error) {
-                console.log(`\t--[${ logTitle }] Cannot read "DataTypesMap" from ${conversion._dataTypesMapAddr}`);
-                process.exit(1);
-            }
-
-            conversion._dataTypesMap = JSON.parse(data.toString());
-            console.log(`\t--[${ logTitle }] Data Types Map is loaded...`);
-            resolve(conversion);
-        });
-    });
-};
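
For context, a minimal sketch (not part of this commit) of how the consolidated helpers compose inside an async context; the paths are illustrative:

    // Hypothetical bootstrap flow built on the refactored FsOps helpers.
    const confPath: string = path.join(__dirname, '..', 'config');
    const logsPath: string = path.join(__dirname, '..');

    const config: any = await readConfig(confPath, logsPath);        // also records indexTypesMapAddr
    const fullConfig: any = await readExtraConfig(config, confPath); // no-op unless enable_extra_config is true
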
11 changes: 9 additions & 2 deletions src/IndexAndKeyProcessor.ts
@@ -26,6 +26,13 @@ import DBAccessQueryResult from './DBAccessQueryResult';
import IDBAccessQueryParams from './IDBAccessQueryParams';
import * as extraConfigProcessor from './ExtraConfigProcessor';

+/**
+ * Returns PostgreSQL index type, that correlates to given MySQL index type.
+ */
+const getIndexType = (conversion: Conversion, indexType: string): string => {
+    return indexType in conversion._indexTypesMap ? conversion._indexTypesMap[indexType] : 'BTREE';
+};
+
/**
* Creates primary key and indices.
*/
@@ -62,7 +69,7 @@ export default async (conversion: Conversion, tableName: string): Promise<void>
objPgIndices[index.Key_name] = {
is_unique: index.Non_unique === 0,
column_name: [`"${ pgColumnName }"`],
-            Index_type: ` USING ${ index.Index_type === 'SPATIAL' ? 'GIST' : index.Index_type }`
+            index_type: ` USING ${ getIndexType(conversion, index.Index_type) }`,
};
});

@@ -79,7 +86,7 @@ export default async (conversion: Conversion, tableName: string): Promise<void>
indexType = 'index';
sqlAddIndex = `CREATE ${ (objPgIndices[index].is_unique ? 'UNIQUE ' : '') }INDEX "${ conversion._schema }_${ tableName }_${ columnName }_idx"
ON "${ conversion._schema }"."${ tableName }"
-                ${ objPgIndices[index].Index_type } (${ objPgIndices[index].column_name.join(',') });`;
+                ${ objPgIndices[index].index_type } (${ objPgIndices[index].column_name.join(',') });`;
}

params.vendor = DBVendors.PG;
6 changes: 3 additions & 3 deletions src/Main.ts
@@ -30,16 +30,16 @@ import { processConstraints } from './ConstraintsProcessor';
import { getConfAndLogsPaths, boot } from './BootProcessor';
import { createStateLogsTable, dropStateLogsTable } from './MigrationStateManager';
import { createDataPoolTable, readDataPool } from './DataPoolManager';
-import { readConfig, readExtraConfig, createLogsDirectory, readDataTypesMap } from './FsOps';
+import { readConfig, readExtraConfig, createLogsDirectory, readDataAndIndexTypesMap } from './FsOps';

const { confPath, logsPath } = getConfAndLogsPaths();

readConfig(confPath, logsPath)
.then(config => readExtraConfig(config, confPath))
.then(Conversion.initializeConversion)
-    .then(boot)
-    .then(readDataTypesMap)
    .then(createLogsDirectory)
+    .then(readDataAndIndexTypesMap)
+    .then(boot)
.then(createSchema)
.then(createStateLogsTable)
.then(createDataPoolTable)