Merge branch 'feature/js-to-ts' into develop

Anatoly 2018-10-07 00:23:55 +03:00
commit abdf211936
77 changed files with 3830 additions and 4752 deletions

.gitignore vendored

@@ -1,3 +1,4 @@
.idea
node_modules
logs_directory
dist

README.md

@@ -27,7 +27,7 @@ from MySQL to PostgreSQL as easy and smooth as possible.</p>
</ul>
<h3>USAGE</h3>
<p><b>1.</b> Create a new database.<br />
<p><b>1.</b> Create a new PostgreSQL database.<br />
<b>Sample:</b>&nbsp;<code> CREATE DATABASE my_postgresql_database;</code><br />
If you are planning to migrate spatial data (geometry type columns), then <b>PostGIS</b> should be installed and enabled.
</p>
@@ -42,10 +42,11 @@ from MySQL to PostgreSQL as easy and smooth as possible.</p>
<li>Make sure that the username you use in your PostgreSQL connection details is defined as superuser (usually "postgres")<br> More info: <a href="http://www.postgresql.org/docs/current/static/app-createuser.html">http://www.postgresql.org/docs/current/static/app-createuser.html</a></li>
</ul>
<p><b>4.</b> Go to Nmig directory, install dependencies, and run the app<br />
<p><b>4.</b> Go to Nmig directory, install dependencies, compile and run the app<br />
&nbsp;&nbsp;&nbsp;&nbsp;<b>Sample:</b><br />
<pre>$ cd /path/to/nmig</pre><br />
<pre>$ npm install</pre><br />
<pre>$ npm run build</pre><br />
<pre>$ npm start</pre><br />
</p>
@@ -66,7 +67,7 @@ from MySQL to PostgreSQL as easy and smooth as possible.</p>
<p><b>1.</b> Create a new PostgreSQL database.<br />
<b>Sample:</b>&nbsp;<code> CREATE DATABASE nmig_test_db;</code><br />
</p>
<p><b>2.</b> Download Nmig package.</p>
<p><b>2.</b> Download Nmig package.<br /><b>Sample:</b>&nbsp;<code>/path/to/nmig</code></p>
<p><b>3.</b> Edit configuration file located at <code>/path/to/nmig/config/test_config.json</code> with correct details.<br /></p>
<b>Notes:</b>
<ul>
@@ -76,10 +77,11 @@ from MySQL to PostgreSQL as easy and smooth as possible.</p>
<a href="http://www.postgresql.org/docs/current/static/app-createuser.html">http://www.postgresql.org/docs/current/static/app-createuser.html</a>
</li>
</ul>
<p><b>4.</b> Go to nmig directory, install dependencies, and run tests<br />
<p><b>4.</b> Go to nmig directory, install dependencies, compile and run tests<br />
&nbsp;&nbsp;&nbsp;&nbsp;<b>Sample:</b><br />
<pre>$ cd /path/to/nmig</pre><br />
<pre>$ npm install</pre><br />
<pre>$ npm run build</pre><br />
<pre>$ npm test</pre><br />
</p>
<p><b>5.</b> At the end of migration check log files, if necessary.<br />&nbsp;&nbsp;&nbsp;
@@ -87,7 +89,7 @@ from MySQL to PostgreSQL as easy and smooth as possible.</p>
<b>Note:</b> "logs_directory" will be created during script execution.</p>
<h3>VERSION</h3>
<p>Current version is 3.4.0<br />
<p>Current version is 3.5.0<br />
(major version . improvements . bug fixes)</p>
<h3>KNOWN ISSUES</h3>

package-lock.json generated

@@ -1,9 +1,59 @@
{
"name": "nmig",
"version": "3.4.0",
"version": "3.5.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@types/events": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@types/events/-/events-1.2.0.tgz",
"integrity": "sha512-KEIlhXnIutzKwRbQkGWb/I4HFqBuUykAdHgDED6xqwXJfONCjF5VoE0cXEiurh3XauygxzeDzgtXUqvLkxFzzA==",
"dev": true
},
"@types/mysql": {
"version": "2.15.5",
"resolved": "https://registry.npmjs.org/@types/mysql/-/mysql-2.15.5.tgz",
"integrity": "sha512-4QAISTUGZbcFh7bqdndo08xRdES5OTU+JODy8VCZbe1yiXyGjqw1H83G43XjQ3IbC10wn9xlGd44A5RXJwNh0Q==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/node": {
"version": "10.5.4",
"resolved": "https://registry.npmjs.org/@types/node/-/node-10.5.4.tgz",
"integrity": "sha512-8TqvB0ReZWwtcd3LXq3YSrBoLyXFgBX/sBZfGye9+YS8zH7/g+i6QRIuiDmwBoTzcQ/pk89nZYTYU4c5akKkzw==",
"dev": true
},
"@types/pg": {
"version": "7.4.10",
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-7.4.10.tgz",
"integrity": "sha512-IQ9vRZ3oX99TXZiVq5PgODNoqgHvn2girbkxa6gBT7DPGgvRiJ7kZNwmPiLqSOzlRgMHBIujFeiwD5Sf5TIJqg==",
"dev": true,
"requires": {
"@types/events": "*",
"@types/node": "*",
"@types/pg-types": "*"
}
},
"@types/pg-types": {
"version": "1.11.4",
"resolved": "https://registry.npmjs.org/@types/pg-types/-/pg-types-1.11.4.tgz",
"integrity": "sha512-WdIiQmE347LGc1Vq3Ki8sk3iyCuLgnccqVzgxek6gEHp2H0p3MQ3jniIHt+bRODXKju4kNQ+mp53lmP5+/9moQ==",
"dev": true,
"requires": {
"moment": ">=2.14.0"
}
},
"@types/tape": {
"version": "4.2.32",
"resolved": "https://registry.npmjs.org/@types/tape/-/tape-4.2.32.tgz",
"integrity": "sha512-xil0KO5wkPoixdBWGIGolPv9dekf6dVkjjJLAFYchfKcd4DICou67rgGCIO7wAh3i5Ff/6j9IDgZz+GU9cMaqQ==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
@@ -11,9 +61,9 @@
"dev": true
},
"bignumber.js": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-4.0.4.tgz",
"integrity": "sha512-LDXpJKVzEx2/OqNbG9mXBNvHuiRL4PzHCGfnANHMJ+fv68Ads3exDVJeGDJws+AoNEuca93bU3q+S0woeUaCdg=="
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-4.1.0.tgz",
"integrity": "sha512-eJzYkFYy9L4JzXsbymsFn3p54D+llV27oTQ+ziJG7WFRheJcNZilgVXMG0LoZtlQSKBsJdWtLFqOD0u+U0jZKA=="
},
"brace-expansion": {
"version": "1.1.11",
@@ -21,7 +71,7 @@
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"requires": {
"balanced-match": "1.0.0",
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
@@ -53,8 +103,8 @@
"integrity": "sha1-g6c/L+pWmJj7c3GTyPhzyvbUXJQ=",
"dev": true,
"requires": {
"foreach": "2.0.5",
"object-keys": "1.0.11"
"foreach": "^2.0.5",
"object-keys": "^1.0.8"
}
},
"defined": {
@@ -64,16 +114,16 @@
"dev": true
},
"es-abstract": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.10.0.tgz",
"integrity": "sha512-/uh/DhdqIOSkAWifU+8nG78vlQxdLckUdI/sPgy0VhuXi2qJ7T8czBmqIYtLQVpCIFYafChnsRsB5pyb1JdmCQ==",
"version": "1.12.0",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.12.0.tgz",
"integrity": "sha512-C8Fx/0jFmV5IPoMOFPA9P9G5NtqW+4cOPit3MIuvR2t7Ag2K15EJTpxnHAYTzL+aYQJIESYeXZmDBfOBE1HcpA==",
"dev": true,
"requires": {
"es-to-primitive": "1.1.1",
"function-bind": "1.1.1",
"has": "1.0.1",
"is-callable": "1.1.3",
"is-regex": "1.0.4"
"es-to-primitive": "^1.1.1",
"function-bind": "^1.1.1",
"has": "^1.0.1",
"is-callable": "^1.1.3",
"is-regex": "^1.0.4"
}
},
"es-to-primitive": {
@@ -82,18 +132,18 @@
"integrity": "sha1-RTVSSKiJeQNLZ5Lhm7gfK3l13Q0=",
"dev": true,
"requires": {
"is-callable": "1.1.3",
"is-date-object": "1.0.1",
"is-symbol": "1.0.1"
"is-callable": "^1.1.1",
"is-date-object": "^1.0.1",
"is-symbol": "^1.0.1"
}
},
"for-each": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.2.tgz",
"integrity": "sha1-LEBFC5NI6X8oEyJZO6lnBLmr1NQ=",
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
"integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==",
"dev": true,
"requires": {
"is-function": "1.0.1"
"is-callable": "^1.1.3"
}
},
"foreach": {
@@ -120,21 +170,21 @@
"integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==",
"dev": true,
"requires": {
"fs.realpath": "1.0.0",
"inflight": "1.0.6",
"inherits": "2.0.3",
"minimatch": "3.0.4",
"once": "1.4.0",
"path-is-absolute": "1.0.1"
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"has": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.1.tgz",
"integrity": "sha1-hGFzP1OLCDfJNh45qauelwTcLyg=",
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
"integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
"dev": true,
"requires": {
"function-bind": "1.1.1"
"function-bind": "^1.1.1"
}
},
"inflight": {
@@ -143,8 +193,8 @@
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"dev": true,
"requires": {
"once": "1.4.0",
"wrappy": "1.0.2"
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
@@ -153,9 +203,9 @@
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
},
"is-callable": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.3.tgz",
"integrity": "sha1-hut1OSgF3cM69xySoO7fdO52BLI=",
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz",
"integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==",
"dev": true
},
"is-date-object": {
@@ -164,19 +214,13 @@
"integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=",
"dev": true
},
"is-function": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.1.tgz",
"integrity": "sha1-Es+5i2W1fdPRk6MSH19uL0N2ArU=",
"dev": true
},
"is-regex": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz",
"integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=",
"dev": true,
"requires": {
"has": "1.0.1"
"has": "^1.0.1"
}
},
"is-symbol": {
@@ -190,18 +234,13 @@
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"js-string-escape": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz",
"integrity": "sha1-4mJbrbwNZ8dTPp7cEGjFh65BN+8="
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"requires": {
"brace-expansion": "1.1.11"
"brace-expansion": "^1.1.7"
}
},
"minimist": {
@@ -210,27 +249,33 @@
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
"dev": true
},
"moment": {
"version": "2.22.2",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.22.2.tgz",
"integrity": "sha1-PCV/mDn8DpP/UxSWMiOeuQeD/2Y=",
"dev": true
},
"mysql": {
"version": "2.15.0",
"resolved": "https://registry.npmjs.org/mysql/-/mysql-2.15.0.tgz",
"integrity": "sha512-C7tjzWtbN5nzkLIV+E8Crnl9bFyc7d3XJcBAvHKEVkjrYjogz3llo22q6s/hw+UcsE4/844pDob9ac+3dVjQSA==",
"version": "2.16.0",
"resolved": "https://registry.npmjs.org/mysql/-/mysql-2.16.0.tgz",
"integrity": "sha512-dPbN2LHonQp7D5ja5DJXNbCLe/HRdu+f3v61aguzNRQIrmZLOeRoymBYyeThrR6ug+FqzDL95Gc9maqZUJS+Gw==",
"requires": {
"bignumber.js": "4.0.4",
"readable-stream": "2.3.3",
"safe-buffer": "5.1.1",
"sqlstring": "2.3.0"
"bignumber.js": "4.1.0",
"readable-stream": "2.3.6",
"safe-buffer": "5.1.2",
"sqlstring": "2.3.1"
}
},
"object-inspect": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.3.0.tgz",
"integrity": "sha512-OHHnLgLNXpM++GnJRyyhbr2bwl3pPVm4YvaraHrRvDt/N3r+s/gDVHciA7EJBTkijKXj61ssgSAikq1fb0IBRg==",
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.6.0.tgz",
"integrity": "sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ==",
"dev": true
},
"object-keys": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.11.tgz",
"integrity": "sha1-xUYBd4rVYPEULODgG8yotW0TQm0=",
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.12.tgz",
"integrity": "sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==",
"dev": true
},
"once": {
@@ -239,7 +284,7 @@
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"dev": true,
"requires": {
"wrappy": "1.0.2"
"wrappy": "1"
}
},
"packet-reader": {
@@ -260,17 +305,16 @@
"dev": true
},
"pg": {
"version": "7.4.1",
"resolved": "https://registry.npmjs.org/pg/-/pg-7.4.1.tgz",
"integrity": "sha512-Pi5qYuXro5PAD9xXx8h7bFtmHgAQEG6/SCNyi7gS3rvb/ZQYDmxKchfB0zYtiSJNWq9iXTsYsHjrM+21eBcN1A==",
"version": "7.4.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-7.4.3.tgz",
"integrity": "sha1-97b5P1NA7MJZavu5ShPj1rYJg0s=",
"requires": {
"buffer-writer": "1.0.1",
"js-string-escape": "1.0.1",
"packet-reader": "0.3.1",
"pg-connection-string": "0.1.3",
"pg-pool": "2.0.3",
"pg-types": "1.12.1",
"pgpass": "1.0.2",
"pg-pool": "~2.0.3",
"pg-types": "~1.12.1",
"pgpass": "1.x",
"semver": "4.3.2"
}
},
@@ -294,10 +338,10 @@
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.12.1.tgz",
"integrity": "sha1-1kCH45A7WP+q0nnnWVxSIIoUw9I=",
"requires": {
"postgres-array": "1.0.2",
"postgres-bytea": "1.0.0",
"postgres-date": "1.0.3",
"postgres-interval": "1.1.1"
"postgres-array": "~1.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.0",
"postgres-interval": "^1.1.0"
}
},
"pgpass": {
@@ -305,7 +349,7 @@
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.2.tgz",
"integrity": "sha1-Knu0G2BltnkH6R2hsHwYR8h3swY=",
"requires": {
"split": "1.0.1"
"split": "^1.0.0"
}
},
"postgres-array": {
@@ -324,39 +368,39 @@
"integrity": "sha1-4tiXAu/bJY/52c7g/pG9BpdSV6g="
},
"postgres-interval": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.1.1.tgz",
"integrity": "sha512-OkuCi9t/3CZmeQreutGgx/OVNv9MKHGIT5jH8KldQ4NLYXkvmT9nDVxEuCENlNwhlGPE374oA/xMqn05G49pHA==",
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.1.2.tgz",
"integrity": "sha512-fC3xNHeTskCxL1dC8KOtxXt7YeFmlbTYtn7ul8MkVERuTmf7pI4DrkAxcw3kh1fQ9uz4wQmd03a1mRiXUZChfQ==",
"requires": {
"xtend": "4.0.1"
"xtend": "^4.0.0"
}
},
"process-nextick-args": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz",
"integrity": "sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M="
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz",
"integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw=="
},
"readable-stream": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.3.tgz",
"integrity": "sha512-m+qzzcn7KUxEmd1gMbchF+Y2eIUbieUaxkWtptyHywrX0rE8QEYqPC07Vuy4Wm32/xE16NcdBctb8S0Xe/5IeQ==",
"version": "2.3.6",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
"integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==",
"requires": {
"core-util-is": "1.0.2",
"inherits": "2.0.3",
"isarray": "1.0.0",
"process-nextick-args": "1.0.7",
"safe-buffer": "5.1.1",
"string_decoder": "1.0.3",
"util-deprecate": "1.0.2"
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"resolve": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.4.0.tgz",
"integrity": "sha512-aW7sVKPufyHqOmyyLzg/J+8606v5nevBgaliIlV7nUpVMsDnoBGV/cbSLNjZAg9q0Cfd/+easKVKQ8vOu8fn1Q==",
"version": "1.7.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.7.1.tgz",
"integrity": "sha512-c7rwLofp8g1U+h1KNyHL/jicrKg1Ek4q+Lr33AL65uZTinUZHe30D5HlyN5V9NW0JX1D5dXQ4jqW5l7Sy/kGfw==",
"dev": true,
"requires": {
"path-parse": "1.0.5"
"path-parse": "^1.0.5"
}
},
"resumer": {
@@ -365,13 +409,13 @@
"integrity": "sha1-8ej0YeQGS6Oegq883CqMiT0HZ1k=",
"dev": true,
"requires": {
"through": "2.3.8"
"through": "~2.3.4"
}
},
"safe-buffer": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz",
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
},
"semver": {
"version": "4.3.2",
@@ -383,13 +427,13 @@
"resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
"integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
"requires": {
"through": "2.3.8"
"through": "2"
}
},
"sqlstring": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.0.tgz",
"integrity": "sha1-UluKT9Jtb3GqYegipsr5dtMa0qg="
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.1.tgz",
"integrity": "sha1-R1OT/56RR5rqYtyvDKPRSYOn+0A="
},
"string.prototype.trim": {
"version": "1.1.2",
@@ -397,38 +441,38 @@
"integrity": "sha1-0E3iyJ4Tf019IG8Ia17S+ua+jOo=",
"dev": true,
"requires": {
"define-properties": "1.1.2",
"es-abstract": "1.10.0",
"function-bind": "1.1.1"
"define-properties": "^1.1.2",
"es-abstract": "^1.5.0",
"function-bind": "^1.0.2"
}
},
"string_decoder": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.0.3.tgz",
"integrity": "sha512-4AH6Z5fzNNBcH+6XDMfA/BTt87skxqJlO0lAh3Dker5zThcAxG6mKz+iGu308UKoPPQ8Dcqx/4JhujzltRa+hQ==",
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"requires": {
"safe-buffer": "5.1.1"
"safe-buffer": "~5.1.0"
}
},
"tape": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/tape/-/tape-4.8.0.tgz",
"integrity": "sha512-TWILfEnvO7I8mFe35d98F6T5fbLaEtbFTG/lxWvid8qDfFTxt19EBijWmB4j3+Hoh5TfHE2faWs73ua+EphuBA==",
"version": "4.9.1",
"resolved": "https://registry.npmjs.org/tape/-/tape-4.9.1.tgz",
"integrity": "sha512-6fKIXknLpoe/Jp4rzHKFPpJUHDHDqn8jus99IfPnHIjyz78HYlefTGD3b5EkbQzuLfaEvmfPK3IolLgq2xT3kw==",
"dev": true,
"requires": {
"deep-equal": "1.0.1",
"defined": "1.0.0",
"for-each": "0.3.2",
"function-bind": "1.1.1",
"glob": "7.1.2",
"has": "1.0.1",
"inherits": "2.0.3",
"minimist": "1.2.0",
"object-inspect": "1.3.0",
"resolve": "1.4.0",
"resumer": "0.0.0",
"string.prototype.trim": "1.1.2",
"through": "2.3.8"
"deep-equal": "~1.0.1",
"defined": "~1.0.0",
"for-each": "~0.3.3",
"function-bind": "~1.1.1",
"glob": "~7.1.2",
"has": "~1.0.3",
"inherits": "~2.0.3",
"minimist": "~1.2.0",
"object-inspect": "~1.6.0",
"resolve": "~1.7.1",
"resumer": "~0.0.0",
"string.prototype.trim": "~1.1.2",
"through": "~2.3.8"
}
},
"through": {
@@ -436,6 +480,12 @@
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
},
"typescript": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.1.1.tgz",
"integrity": "sha512-Veu0w4dTc/9wlWNf2jeRInNodKlcdLgemvPsrNpfu5Pq39sgfFjvIIgTsvUHCoLBnMhPoUA+tFxsXjU6VexVRQ==",
"dev": true
},
"util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

package.json

@@ -1,6 +1,6 @@
{
"name": "nmig",
"version": "3.4.0",
"version": "3.5.0",
"description": "The database migration app",
"author": "Anatoly Khaytovich<anatolyuss@gmail.com>",
"license": "GPL-3.0",
@@ -12,16 +12,22 @@
"node": ">=8.0.0"
},
"dependencies": {
"mysql": "^2.15.0",
"pg": "^7.4.1",
"mysql": "^2.16.0",
"pg": "^7.4.3",
"pg-copy-streams": "^1.2.0"
},
"scripts": {
"start": "node src/Main.js",
"test": "node test/Main.test.js"
},
"devDependencies": {
"tape": "^4.8.0"
"@types/mysql": "^2.15.5",
"@types/node": "^10.5.4",
"@types/pg": "^7.4.10",
"@types/tape": "^4.2.32",
"tape": "^4.9.1",
"typescript": "^3.1.1"
},
"scripts": {
"build": "tsc",
"start": "node dist/src/Main.js",
"test": "node dist/test/Main.test.js"
},
"keywords": [
"database migration",

src/BinaryDataDecoder.js

@@ -1,92 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const generateError = require('./ErrorGenerator');
const log = require('./Logger');
const connect = require('./Connector');
/**
* Decodes binary data from textual representation in string.
*
* @param {Conversion} conversion
*
* @returns {Promise<Conversion>}
*/
module.exports = conversion => {
log(conversion, '\t--[decodeBinaryData] Decodes binary data from textual representation in string.');
return connect(conversion).then(() => {
return new Promise(resolve => {
conversion._pg.connect((error, client, release) => {
if (error) {
generateError(conversion, '\t--[decodeBinaryData] Cannot connect to PostgreSQL server...');
return resolve(conversion);
}
const sql = `SELECT table_name, column_name
FROM information_schema.columns
WHERE table_catalog = '${ conversion._targetConString.database }'
AND table_schema = '${ conversion._schema }'
AND data_type IN ('bytea', 'geometry');`;
client.query(sql, (err, data) => {
release();
if (err) {
generateError(conversion, `\t--[decodeBinaryData] ${ err }`, sql);
return resolve(conversion);
}
const decodePromises = [];
for (let i = 0; i < data.rows.length; ++i) {
decodePromises.push(new Promise(resolveDecode => {
conversion._pg.connect((connectionError, pgClient, clientRelease) => {
if (connectionError) {
generateError(conversion, '\t--[decodeBinaryData] Cannot connect to PostgreSQL server...');
return resolveDecode();
}
const tableName = data.rows[i].table_name;
const columnName = data.rows[i].column_name;
const sqlDecode = `UPDATE ${ conversion._schema }."${ tableName }"
SET "${ columnName }" = DECODE(ENCODE("${ columnName }", 'escape'), 'hex');`;
pgClient.query(sqlDecode, decodeError => {
clientRelease();
if (decodeError) {
generateError(conversion, `\t--[decodeBinaryData] ${ decodeError }`, sqlDecode);
}
resolveDecode();
});
});
}));
}
Promise.all(decodePromises).then(() => resolve(conversion));
});
});
});
});
};

src/BinaryDataDecoder.ts Normal file

@@ -0,0 +1,61 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
import { PoolClient } from 'pg';
/**
* Decodes binary data from textual representation in string.
*/
export default async function (conversion: Conversion): Promise<Conversion> {
const logTitle: string = 'BinaryDataDecoder::decodeBinaryData';
log(conversion, `\t--[${ logTitle }] Decodes binary data from textual representation in string.`);
const dbAccess: DBAccess = new DBAccess(conversion);
const sql: string = `SELECT table_name, column_name
FROM information_schema.columns
WHERE table_catalog = '${ conversion._targetConString.database }'
AND table_schema = '${ conversion._schema }'
AND data_type IN ('bytea', 'geometry');`;
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
if (result.error) {
// No need to continue if no 'bytea' or 'geometry' columns found.
dbAccess.releaseDbClient(<PoolClient>result.client);
return conversion;
}
const decodePromises: Promise<void>[] = result.data.rows.map(async (row: any) => {
const tableName: string = row.table_name;
const columnName: string = row.column_name;
const sqlDecode: string = `UPDATE ${ conversion._schema }."${ tableName }"
SET "${ columnName }" = DECODE(ENCODE("${ columnName }", 'escape'), 'hex');`;
await dbAccess.query(logTitle, sqlDecode, DBVendors.PG, false, false);
});
await Promise.all(decodePromises);
return conversion;
}
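
The decoding step itself is plain SQL. For a single hypothetical bytea column (the schema, table, and column names below are invented for illustration, not taken from the source), the generated statement looks like this:

// Illustration only: the UPDATE that decodeBinaryData would generate for one column.
// ENCODE("col", 'escape') turns the stored bytes (ASCII hex digits after the initial load)
// back into a text string; DECODE(..., 'hex') parses that hex text into the real binary value.
const schema: string = 'public';
const tableName: string = 'users';
const columnName: string = 'avatar';

const sqlDecode: string = `UPDATE ${ schema }."${ tableName }"
                           SET "${ columnName }" = DECODE(ENCODE("${ columnName }", 'escape'), 'hex');`;

console.log(sqlDecode);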

src/Boot.js

@@ -1,93 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
/**
* Boot the migration.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports = self => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
console.log('\t--[boot] Cannot connect to PostgreSQL server...\n' + error);
done();
process.exit();
} else {
const sql = 'SELECT EXISTS(SELECT 1 FROM information_schema.tables '
+ 'WHERE table_schema = \'' + self._schema
+ '\' AND table_name = \'state_logs_' + self._schema + self._mySqlDbName + '\');';
client.query(sql, (err, result) => {
done();
if (err) {
console.log('\t--[boot] Error when executed query:\n' + sql + '\nError message:\n' + err);
process.exit();
} else {
const isExists = !!result.rows[0].exists;
const message = (isExists
? '\n\t--[boot] NMIG is ready to restart after some failure.'
+ '\n\t--[boot] Consider checking log files at the end of migration.'
: '\n\t--[boot] NMIG is ready to start.') + '\n\t--[boot] Proceed? [Y/n]';
const logo = '\n\t/\\_ |\\ /\\/\\ /\\___'
+ '\n\t| \\ | |\\ | | | __'
+ '\n\t| |\\\\| || | | | \\_ \\'
+ '\n\t| | \\| || | | |__/ |'
+ '\n\t\\| \\/ /_|/______/'
+ '\n\n\tNMIG - the database migration tool'
+ '\n\tCopyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>\n\n'
+ '\t--[boot] Configuration has been just loaded.'
+ message;
console.log(logo);
process
.stdin
.resume()
.setEncoding(self._encoding)
.on('data', stdin => {
if (stdin.indexOf('n') !== -1) {
console.log('\t--[boot] Migration aborted.\n');
process.exit();
} else if (stdin.indexOf('Y') !== -1) {
resolve(self);
} else {
const hint = '\t--[boot] Unexpected input ' + stdin + '\n'
+ '\t--[boot] Expected input is upper case Y\n'
+ '\t--[boot] or lower case n\n' + message;
console.log(hint);
}
});
}
});
}
});
});
});
};

src/BootProcessor.ts Normal file

@@ -0,0 +1,101 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
/**
* Checks correctness of connection details of both MySQL and PostgreSQL.
*/
export async function checkConnection(conversion: Conversion, dbAccess: DBAccess): Promise<string> {
const logTitle: string = 'BootProcessor::checkConnection';
let resultMessage: string = '';
const sql: string = 'SELECT 1;';
const mySqlResult: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.MYSQL, false, false);
resultMessage += mySqlResult.error ? `\tMySQL connection error: ${ JSON.stringify(mySqlResult.error) }\n` : '';
const pgResult: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
resultMessage += pgResult.error ? `\tPostgreSQL connection error: ${ JSON.stringify(pgResult.error) }` : '';
return resultMessage;
}
/**
* Returns Nmig's logo.
*/
export function getLogo(): string {
return '\n\t/\\_ |\\ /\\/\\ /\\___'
+ '\n\t| \\ | |\\ | | | __'
+ '\n\t| |\\\\| || | | | \\_ \\'
+ '\n\t| | \\| || | | |__/ |'
+ '\n\t\\| \\/ /_|/______/'
+ '\n\n\tNMIG - the database migration tool'
+ '\n\tCopyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>\n\n'
+ '\t--[boot] Configuration has been just loaded.';
}
/**
* Boots the migration.
*/
export function boot(conversion: Conversion): Promise<Conversion> {
return new Promise<Conversion>(async resolve => {
const dbAccess: DBAccess = new DBAccess(conversion);
const connectionErrorMessage = await checkConnection(conversion, dbAccess);
const logo: string = getLogo();
if (connectionErrorMessage) {
console.log(`${ logo } \n ${ connectionErrorMessage }`);
process.exit();
}
const sql: string = `SELECT EXISTS(SELECT 1 FROM information_schema.tables
WHERE table_schema = '${ conversion._schema }'
AND table_name = 'state_logs_${ conversion._schema }${ conversion._mySqlDbName }');`;
const result: DBAccessQueryResult = await dbAccess.query('Boot', sql, DBVendors.PG, true, false);
const isExists: boolean = !!result.data.rows[0].exists;
const message: string = `${ (isExists
? '\n\t--[boot] NMIG is ready to restart after some failure.\n\t--[boot] Consider checking log files at the end of migration.'
: '\n\t--[boot] NMIG is ready to start.') } \n\t--[boot] Proceed? [Y/n]`;
console.log(logo + message);
process
.stdin
.resume()
.setEncoding(conversion._encoding)
.on('data', (stdin: string) => {
if (stdin.indexOf('n') !== -1) {
console.log('\t--[boot] Migration aborted.\n');
process.exit();
} else if (stdin.indexOf('Y') !== -1) {
return resolve(conversion);
} else {
const hint: string = `\t--[boot] Unexpected input ${ stdin }\n
\t--[boot] Expected input is upper case Y\n
\t--[boot] or lower case n\n${ message }`;
console.log(hint);
}
});
});
}
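
A hedged sketch of a call site follows; the construction details are assumptions (config.json is presumed to live under config/, and Conversion is presumed constructible from the parsed config, like its JavaScript predecessor):

import Conversion from './Conversion';
import { boot } from './BootProcessor';

// Assumed config location and Conversion constructor signature.
const config: any = require('../config/config.json');
const conversion: Conversion = new Conversion(config);

boot(conversion).then((_: Conversion) => {
    // Reached only after the user confirms with "Y".
});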

src/BufferStream.js → src/BufferStream.ts

@@ -18,51 +18,51 @@
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
import { Readable } from 'stream';
const { Readable } = require('stream');
export default class BufferStream extends Readable {
/**
* The Buffer that contains the data to load.
*/
private _source?: Buffer;
/**
* The offset from which the data should be read into the underlying stream buffer.
*/
private _offset?: number;
module.exports = class BufferStream extends Readable {
/**
* BufferStream constructor.
*
* @param {Buffer} source
*/
constructor(source) {
public constructor(source: Buffer) {
super();
this._source = source;
this._offset = 0;
// When source buffer consumed entirely, then the 'end' event is emitted.
this.on('end', this.destroy.bind(this));
// When the source buffer is consumed entirely, the 'end' event is emitted.
this.on('end', this._destruct.bind(this));
}
/**
* BufferStream destructor.
*
* @returns {undefined}
*/
destroy() {
this._source = null;
this._offset = null;
private _destruct(): void {
this._source = undefined;
this._offset = undefined;
}
/**
* Read chunks from the source buffer into the underlying stream buffer.
*
* @param {Number} size
*
* @returns {undefined}
* Reads chunks from the source buffer into the underlying stream buffer.
*/
_read(size) {
public _read(size: number): void {
// Push the next chunk onto the internal stream buffer.
if (this._offset < this._source.length) {
this.push(this._source.slice(this._offset, this._offset + size));
this._offset += size;
if ((<number>this._offset) < (<Buffer>this._source).length) {
this.push((<Buffer>this._source).slice((<number>this._offset), (<number>this._offset) + size));
(<number>this._offset) += size;
return;
}
// When the source ends, then the EOF - signaling `null` chunk should be pushed.
// When the source ends, the EOF-signaling `null` chunk should be pushed.
this.push(null);
}
};
}
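
A minimal usage sketch of the typed BufferStream (the payload is invented; in Nmig the buffer presumably carries rows destined for PostgreSQL's COPY stream via pg-copy-streams):

import BufferStream from './BufferStream';

// Stream a Buffer as an ordinary Readable.
const source: Buffer = Buffer.from('1,alpha\n2,beta\n', 'utf8');
const stream: BufferStream = new BufferStream(source);

stream.on('data', (chunk: Buffer) => process.stdout.write(chunk));
stream.on('end', () => console.log('buffer fully consumed'));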

src/Conversion.js

@@ -1,88 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const path = require('path');
module.exports = class Conversion {
/**
* Constructor.
*
* @param {Object} config
*/
constructor(config) {
this._config = config;
this._sourceConString = this._config.source;
this._targetConString = this._config.target;
this._logsDirPath = this._config.logsDirPath;
this._dataTypesMapAddr = this._config.dataTypesMapAddr;
this._allLogsPath = path.join(this._logsDirPath, 'all.log');
this._errorLogsPath = path.join(this._logsDirPath, 'errors-only.log');
this._notCreatedViewsPath = path.join(this._logsDirPath, 'not_created_views');
this._noVacuum = this._config.no_vacuum === undefined ? [] : this._config.no_vacuum;
this._excludeTables = this._config.exclude_tables === undefined ? [] : this._config.exclude_tables;
this._timeBegin = new Date();
this._encoding = this._config.encoding === undefined ? 'utf8' : this._config.encoding;
this._dataChunkSize = this._config.data_chunk_size === undefined ? 1 : +this._config.data_chunk_size;
this._dataChunkSize = this._dataChunkSize <= 0 ? 1 : this._dataChunkSize;
this._0777 = '0777';
this._mysql = null;
this._pg = null;
this._mysqlVersion = '5.6.21'; // Simply a default value.
this._extraConfig = this._config.extraConfig === undefined ? false : this._config.extraConfig;
this._tablesToMigrate = [];
this._viewsToMigrate = [];
this._processedChunks = 0;
this._dataPool = [];
this._dicTables = Object.create(null);
this._mySqlDbName = this._sourceConString.database;
this._schema = this._config.schema === undefined || this._config.schema === ''
? this._mySqlDbName
: this._config.schema;
this._maxDbConnectionPoolSize = this._config.max_db_connection_pool_size !== undefined && this.isIntNumeric(this._config.max_db_connection_pool_size)
? +this._config.max_db_connection_pool_size
: 10;
this._runsInTestMode = false;
this._eventEmitter = null;
this._migrationCompletedEvent = 'migrationCompleted';
this._removeTestResources = this._config.remove_test_resources === undefined ? true : this._config.remove_test_resources;
this._maxDbConnectionPoolSize = this._maxDbConnectionPoolSize > 0 ? this._maxDbConnectionPoolSize : 10;
this._loaderMaxOldSpaceSize = this._config.loader_max_old_space_size;
this._loaderMaxOldSpaceSize = this.isIntNumeric(this._loaderMaxOldSpaceSize) ? this._loaderMaxOldSpaceSize : 'DEFAULT';
this._migrateOnlyData = this._config.migrate_only_data === undefined ? false : this._config.migrate_only_data;
this._delimiter = this._config.delimiter !== undefined && this._config.delimiter.length === 1
? this._config.delimiter
: ',';
}
/**
* Checks if the given value is an integer number.
*
* @param {String|Number} value
*
* @returns {Boolean}
*/
isIntNumeric(value) {
return !isNaN(parseInt(value)) && isFinite(value);
}
};
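
The constructor above implicitly documents the shape of config.json. Here is a minimal sketch of an object it would accept (all values invented; omitted keys fall back to the defaults visible in the constructor):

// Illustrative only: connection details and paths are made up,
// and the data-types-map file name is an assumption.
const config = {
    source: { host: 'localhost', port: 3306, database: 'my_mysql_db', user: 'root', password: 'secret' },
    target: { host: 'localhost', port: 5432, database: 'my_postgresql_database', user: 'postgres', password: 'secret' },
    logsDirPath: '/path/to/nmig/logs_directory',
    dataTypesMapAddr: '/path/to/nmig/config/data_types_map.json',
    schema: 'public',
    data_chunk_size: 10,
    max_db_connection_pool_size: 10
};

const conversion = new Conversion(config);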

src/ColumnsDataArranger.js → src/ColumnsDataArranger.ts

@@ -18,16 +18,11 @@
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
/**
* Define if given type is one of MySQL spatial types.
*
* @param {String} type
*
* @returns {Boolean}
* Defines if given type is one of MySQL spatial types.
*/
const isSpacial = type => {
const isSpacial = (type: string): boolean => {
return type.indexOf('geometry') !== -1
|| type.indexOf('point') !== -1
|| type.indexOf('linestring') !== -1
@@ -35,69 +30,50 @@ const isSpacial = type => {
};
/**
* Define if given type is one of MySQL binary types.
*
* @param {String} type
*
* @returns {Boolean}
* Defines if given type is one of MySQL binary types.
*/
const isBinary = type => {
const isBinary = (type: string): boolean => {
return type.indexOf('blob') !== -1 || type.indexOf('binary') !== -1;
};
/**
* Define if given type is one of MySQL bit types.
*
* @param {String} type
*
* @returns {Boolean}
* Defines if given type is one of MySQL bit types.
*/
const isBit = type => {
const isBit = (type: string): boolean => {
return type.indexOf('bit') !== -1;
};
/**
* Define if given type is one of MySQL date-time types.
*
* @param {String} type
*
* @returns {Boolean}
* Defines if given type is one of MySQL date-time types.
*/
const isDateTime = type => {
const isDateTime = (type: string): boolean => {
return type.indexOf('timestamp') !== -1 || type.indexOf('date') !== -1;
};
/**
* Arranges columns data before loading.
*
* @param {Array} arrTableColumns
* @param {Number} mysqlVersion
*
* @returns {String}
*/
module.exports = (arrTableColumns, mysqlVersion) => {
let strRetVal = '';
const arrTableColumnsLength = arrTableColumns.length;
const wkbFunc = mysqlVersion >= 5.76 ? 'ST_AsWKB' : 'AsWKB';
export default (arrTableColumns: any[], mysqlVersion: string | number): string => {
let strRetVal: string = '';
const wkbFunc: string = mysqlVersion >= 5.76 ? 'ST_AsWKB' : 'AsWKB';
for (let i = 0; i < arrTableColumnsLength; ++i) {
const field = arrTableColumns[i].Field;
const type = arrTableColumns[i].Type;
arrTableColumns.forEach((column: any) => {
const field: string = column.Field;
const type: string = column.Type;
if (isSpacial(type)) {
// Apply HEX(ST_AsWKB(...)) due to the issue, described at https://bugs.mysql.com/bug.php?id=69798
strRetVal += 'HEX(' + wkbFunc + '(`' + field + '`)) AS `' + field + '`,';
strRetVal += `HEX(${ wkbFunc }(\`${ field }\`)) AS \`${ field }\`,`;
} else if (isBinary(type)) {
strRetVal += 'HEX(`' + field + '`) AS `' + field + '`,';
strRetVal += `HEX(\`${ field }\`) AS \`${ field }\`,`;
} else if (isBit(type)) {
strRetVal += 'BIN(`' + field + '`) AS `' + field + '`,';
strRetVal += `BIN(\`${ field }\`) AS \`${ field }\`,`;
} else if (isDateTime(type)) {
strRetVal += 'IF(`' + field + '` IN(\'0000-00-00\', \'0000-00-00 00:00:00\'), \'-INFINITY\', CAST(`'
+ field + '` AS CHAR)) AS `' + field + '`,';
strRetVal += `IF(\`${ field }\` IN('0000-00-00', '0000-00-00 00:00:00'), '-INFINITY', CAST(\`${ field }\` AS CHAR)) AS \`${ field }\`,`;
} else {
strRetVal += '`' + field + '` AS `' + field + '`,';
strRetVal += `\`${ field }\` AS \`${ field }\`,`;
}
}
});
return strRetVal.slice(0, -1);
};
}
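
To make the arranging rules concrete, here is a hedged sketch of a call (the column metadata is invented, and arrangeColumnsData is an assumed import name for this default export):

import arrangeColumnsData from './ColumnsDataArranger';

// Hypothetical DESCRIBE-style metadata, one column per branch of the function.
const columns: any[] = [
    { Field: 'id', Type: 'int(11)' },
    { Field: 'geom', Type: 'point' },
    { Field: 'avatar', Type: 'blob' },
    { Field: 'flags', Type: 'bit(8)' },
    { Field: 'created', Type: 'timestamp' }
];

console.log(arrangeColumnsData(columns, 5.7));
// `id` AS `id`,HEX(AsWKB(`geom`)) AS `geom`,HEX(`avatar`) AS `avatar`,BIN(`flags`) AS `flags`,
// IF(`created` IN('0000-00-00', '0000-00-00 00:00:00'), '-INFINITY', CAST(`created` AS CHAR)) AS `created`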

src/CommentsProcessor.js

@@ -1,181 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const connect = require('./Connector');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Escape quotes inside given string.
*
* @param {String} str
*
* @returns {String}
*/
const escapeQuotes = str => {
const regexp = new RegExp('\'', 'g');
return str.replace(regexp, '\'\'');
};
/**
* Create table comments.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
const processTableComments = (self, tableName) => {
return new Promise(resolve => {
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[processTableComments] Cannot connect to MySQL server...\n\t' + error);
resolve();
} else {
let sql = "SELECT table_comment AS table_comment "
+ "FROM information_schema.tables "
+ "WHERE table_schema = '" + self._mySqlDbName + "' "
+ "AND table_name = '" + extraConfigProcessor.getTableName(self, tableName, true) + "';";
connection.query(sql, (err, rows) => {
connection.release();
if (err) {
generateError(self, '\t--[processTableComments] ' + err, sql);
resolve();
} else {
self._pg.connect((e, client, done) => {
if (e) {
generateError(self, '\t--[processTableComments] Cannot connect to PostgreSQL server...\n' + e);
resolve();
} else {
const comment = escapeQuotes(rows[0].table_comment);
sql = 'COMMENT ON TABLE "' + self._schema + '"."' + tableName + '" IS ' + '\'' + comment + '\';';
client.query(sql, queryError => {
done();
if (queryError) {
const msg = '\t--[processTableComments] Error while processing comment for "'
+ self._schema + '"."' + tableName + '"...\n' + queryError;
generateError(self, msg, sql);
resolve();
} else {
const success = '\t--[processTableComments] Successfully set comment for table "'
+ self._schema + '"."' + tableName + '"';
log(self, success, self._dicTables[tableName].tableLogPath);
resolve();
}
});
}
});
}
});
}
});
});
}
/**
* Create columns comments.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
const processColumnsComments = (self, tableName) => {
return new Promise(resolve => {
const arrCommentPromises = [];
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
if (self._dicTables[tableName].arrTableColumns[i].Comment !== '') {
arrCommentPromises.push(
new Promise(resolveComment => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[processColumnsComments] Cannot connect to PostgreSQL server...\n' + error);
resolveComment();
} else {
const columnName = extraConfigProcessor.getColumnName(
self,
originalTableName,
self._dicTables[tableName].arrTableColumns[i].Field,
false
);
const comment = escapeQuotes(self._dicTables[tableName].arrTableColumns[i].Comment);
const sql = 'COMMENT ON COLUMN "' + self._schema + '"."' + tableName + '"."' + columnName + '" IS \'' + comment + '\';';
client.query(sql, err => {
done();
if (err) {
const msg = '\t--[processColumnsComments] Error while processing comment for "' + self._schema + '"."'
+ tableName + '"."' + columnName + '"...\n' + err;
generateError(self, msg, sql);
resolveComment();
} else {
const success = '\t--[processColumnsComments] Set comment for "' + self._schema + '"."' + tableName
+ '" column: "' + columnName + '"...';
log(self, success, self._dicTables[tableName].tableLogPath);
resolveComment();
}
});
}
});
})
);
}
}
Promise.all(arrCommentPromises).then(() => resolve());
});
}
/**
* Migrate comments.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports = (self, tableName) => {
return connect(self).then(() => {
return new Promise(resolve => {
const msg = '\t--[CommentsProcessor] Creates comments for table "' + self._schema + '"."' + tableName + '"...';
log(self, msg, self._dicTables[tableName].tableLogPath);
Promise.all([
processTableComments(self, tableName),
processColumnsComments(self, tableName)
]).then(() => resolve());
});
});
};

src/CommentsProcessor.ts Normal file

@@ -0,0 +1,103 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Escapes quotes inside given string.
*/
function escapeQuotes(str: string): string {
const regexp: RegExp = new RegExp(`'`, 'g');
return str.replace(regexp, `''`);
}
/**
* Creates table comments.
*/
async function processTableComments(conversion: Conversion, tableName: string): Promise<void> {
const logTitle: string = 'CommentsProcessor::processTableComments';
const dbAccess: DBAccess = new DBAccess(conversion);
const sqlSelectComment: string = `SELECT table_comment AS table_comment FROM information_schema.tables
WHERE table_schema = '${ conversion._mySqlDbName }'
AND table_name = '${ extraConfigProcessor.getTableName(conversion, tableName, true) }';`;
const resultSelectComment: DBAccessQueryResult = await dbAccess.query(logTitle, sqlSelectComment, DBVendors.MYSQL, false, false);
if (resultSelectComment.error) {
return;
}
const comment: string = escapeQuotes(resultSelectComment.data[0].table_comment);
const sqlCreateComment: string = `COMMENT ON TABLE "${ conversion._schema }"."${ tableName }" IS '${ comment }';`;
const createCommentResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlCreateComment, DBVendors.PG, false, false);
if (createCommentResult.error) {
return;
}
const successMsg: string = `\t--[${ logTitle }] Successfully set comment for table "${ conversion._schema }"."${ tableName }"`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
}
/**
* Creates columns comments.
*/
async function processColumnsComments(conversion: Conversion, tableName: string): Promise<void> {
const logTitle: string = 'CommentsProcessor::processColumnsComments';
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const dbAccess: DBAccess = new DBAccess(conversion);
const commentPromises: Promise<void>[] = conversion._dicTables[tableName].arrTableColumns.map(async (column: any) => {
if (column.Comment === '') {
return;
}
const columnName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, column.Field, false);
const comment = escapeQuotes(column.Comment);
const sqlCreateComment: string = `COMMENT ON COLUMN "${ conversion._schema }"."${ tableName }"."${ columnName }" IS '${ comment }';`;
const createCommentResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlCreateComment, DBVendors.PG, false, false);
if (createCommentResult.error) {
return;
}
const successMsg: string = `\t--[${ logTitle }] Set comment for "${ conversion._schema }"."${ tableName }" column: "${ columnName }"...`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
});
await Promise.all(commentPromises);
}
/**
* Migrates comments.
*/
export default async function(conversion: Conversion, tableName: string): Promise<void> {
const logTitle: string = 'CommentsProcessor::default';
const msg: string = `\t--[${ logTitle }] Creates comments for table "${ conversion._schema }"."${ tableName }"...`;
log(conversion, msg, conversion._dicTables[tableName].tableLogPath);
await Promise.all([
processTableComments(conversion, tableName),
processColumnsComments(conversion, tableName)
]);
}
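
The quoting rule in escapeQuotes is the standard SQL one: each single quote is doubled before the comment is embedded in a string literal. A quick illustration (the helper is module-private, so the sketch inlines a copy):

// Inlined copy of the module-private helper, for illustration only.
function escapeQuotes(str: string): string {
    return str.replace(new RegExp(`'`, 'g'), `''`);
}

console.log(escapeQuotes("the user's primary e-mail"));
// the user''s primary e-mail  -- safe inside COMMENT ON ... IS '...'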

src/ConnectionEmitter.js

@@ -1,151 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const mysql = require('mysql');
const pg = require('pg');
const generateError = require('./ErrorGenerator');
module.exports = class ConnectionEmitter {
/**
* ConnectionEmitter constructor.
*
* @param {Conversion} conversion
*/
constructor(conversion) {
this._conversion = conversion;
}
/**
* Ensure MySQL connection pool existence.
*
* @returns {undefined}
*/
_getMysqlConnection() {
if (!this._conversion._mysql) {
this._conversion._sourceConString.connectionLimit = this._conversion._maxDbConnectionPoolSize;
const pool = mysql.createPool(this._conversion._sourceConString);
if (!pool) {
generateError(this._conversion, '\t--[getMysqlConnection] Cannot connect to MySQL server...');
process.exit();
}
this._conversion._mysql = pool;
}
}
/**
* Ensure PostgreSQL connection pool existence.
*
* @returns {undefined}
*/
_getPgConnection() {
if (!this._conversion._pg) {
this._conversion._targetConString.max = this._conversion._maxDbConnectionPoolSize;
const pool = new pg.Pool(this._conversion._targetConString);
if (!pool) {
generateError(this._conversion, '\t--[getPgConnection] Cannot connect to PostgreSQL server...');
process.exit();
}
this._conversion._pg = pool;
this._conversion._pg.on('error', error => {
const message = `Cannot connect to PostgreSQL server...\n${ error.message }\n${ error.stack }`;
generateError(this._conversion, message);
process.exit();
});
}
}
/**
* Obtain Connection instance.
*
* @returns {Promise<Connection>}
*/
async getMysqlClient() {
try {
this._getMysqlConnection();
return await this._conversion._mysql.getConnection();
} catch (error) {
generateError(this._conversion, `\t--[getMysqlClient] Cannot connect to MySQL server...\n${ error }`);
process.exit();
}
}
/**
* Obtain pg.Client instance.
*
* @returns {Promise<pg.Client>}
*/
async getPgClient() {
try {
this._getPgConnection();
return await this._conversion._pg.connect();
} catch (error) {
generateError(this._conversion, `\t--[getPgClient] Cannot connect to PostgreSQL server...\n${ error }`);
process.exit();
}
}
/**
* Runs a query on the first available idle client and returns its result.
* Note, the pool does the acquiring and releasing of the client internally.
*
* @param {String} sql
*
* @returns {Promise<pg.Result>}
*/
async runPgPoolQuery(sql) {
try {
this._getPgConnection();
return await this._conversion._pg.query(sql);
} catch (error) {
generateError(this._conversion, `\t--[pgPoolQuery] Cannot connect to PostgreSQL server...\n${ error }`);
process.exit();
}
}
/**
* Releases MySQL Client back to the pool.
*
* @param {Connection} mysqlClient
*
* @returns {undefined}
*/
releaseMysqlClient(mysqlClient) {
mysqlClient.release();
}
/**
* Releases pg.Client back to the pool.
*
* @param {pg.Client} pgClient
*
* @returns {undefined}
*/
releasePgClient(pgClient) {
pgClient.release();
}
};
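
A hypothetical usage sketch for this (now removed) class: acquire a pg client, run a query, and always hand the client back to the pool.

// Assumes a configured Conversion instance; ConnectionEmitter is the class above.
const ConnectionEmitter = require('./ConnectionEmitter');

async function ping(conversion: any): Promise<void> {
    const emitter = new ConnectionEmitter(conversion);
    const pgClient = await emitter.getPgClient();
    try {
        const result = await pgClient.query('SELECT 1;');
        console.log(result.rows);
    } finally {
        emitter.releasePgClient(pgClient);
    }
}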

src/Connector.js

@@ -1,86 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const mysql = require('mysql');
const pg = require('pg');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const generateReport = require('./ReportGenerator');
/**
* Check if both servers are connected.
* If not, then create connections.
* Kill current process if it cannot connect.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports = self => {
return new Promise(resolve => {
const mysqlConnectionPromise = new Promise((mysqlResolve, mysqlReject) => {
if (!self._mysql) {
self._sourceConString.connectionLimit = self._maxDbConnectionPoolSize;
self._sourceConString.multipleStatements = true;
const pool = mysql.createPool(self._sourceConString);
if (pool) {
self._mysql = pool;
mysqlResolve();
} else {
log(self, '\t--[connect] Cannot connect to MySQL server...');
mysqlReject();
}
} else {
mysqlResolve();
}
});
const pgConnectionPromise = new Promise((pgResolve, pgReject) => {
if (!self._pg) {
self._targetConString.max = self._maxDbConnectionPoolSize;
const pool = new pg.Pool(self._targetConString);
if (pool) {
self._pg = pool;
self._pg.on('error', error => {
const message = 'Cannot connect to PostgreSQL server...\n' + error.message + '\n' + error.stack;
generateError(self, message);
generateReport(self, message);
});
pgResolve();
} else {
log(self, '\t--[connect] Cannot connect to PostgreSQL server...');
pgReject();
}
} else {
pgResolve();
}
});
Promise.all([mysqlConnectionPromise, pgConnectionPromise])
.then(() => resolve())
.catch(() => process.exit());
});
};
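
A hypothetical call site for this (now removed) connector: both pools exist once the returned promise resolves, and a failure to create either pool terminates the process inside the module itself.

const connect = require('./Connector');

// `conversion` is assumed to be a configured Conversion instance.
function ensurePools(conversion: any): Promise<void> {
    return connect(conversion).then(() => {
        // conversion._mysql and conversion._pg are initialized pools here.
    });
}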

src/ConsistencyEnforcer.js

@@ -1,210 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Update consistency state.
*
* @param {Conversion} self
* @param {Number} dataPoolId
*
* @returns {Promise}
*/
const updateConsistencyState = (self, dataPoolId) => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[ConsistencyEnforcer.updateConsistencyState] Cannot connect to PostgreSQL server...\n' + error);
resolve();
} else {
const sql = 'UPDATE "' + self._schema + '"."data_pool_' + self._schema
+ self._mySqlDbName + '" SET is_started = TRUE WHERE id = ' + dataPoolId + ';';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[ConsistencyEnforcer.updateConsistencyState] ' + err, sql);
}
resolve();
});
}
});
});
}
/**
* Get the `is_started` value of current chunk.
*
* @param {Conversion} self
* @param {Number} dataPoolId
*
* @returns {Promise}
*/
const getIsStarted = (self, dataPoolId) => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[ConsistencyEnforcer.getConsistencyState] Cannot connect to PostgreSQL server...\n' + error);
resolve(false);
} else {
const sql = 'SELECT is_started AS is_started FROM "' + self._schema + '"."data_pool_' + self._schema
+ self._mySqlDbName + '" WHERE id = ' + dataPoolId + ';';
client.query(sql, (err, data) => {
done();
if (err) {
generateError(self, '\t--[ConsistencyEnforcer.getConsistencyState] ' + err, sql);
resolve(false);
} else {
resolve(data.rows[0].is_started);
}
});
}
});
});
}
/**
* If the current data chunk runs after a disaster recovery,
* it must be determined whether this chunk has already been loaded,
* in order to prevent possible data duplication.
*
* @param {Conversion} self
* @param {Object} chunk
*
* @returns {Promise}
*/
const hasCurrentChunkLoaded = (self, chunk) => {
return new Promise(resolve => {
self._pg.connect((pgError, client, done) => {
if (pgError) {
generateError(self, '\t--[ConsistencyEnforcer::hasCurrentChunkLoaded] Cannot connect to PostgreSQL server...\n' + pgError);
resolve(true);
} else {
const originalTableName = extraConfigProcessor.getTableName(self, chunk._tableName, true);
const sql = 'SELECT EXISTS(SELECT 1 FROM "' + self._schema + '"."' + chunk._tableName
+ '" WHERE "' + self._schema + '_' + originalTableName + '_data_chunk_id_temp" = ' + chunk._id + ');';
client.query(sql, (err, result) => {
done();
if (err) {
generateError(self, '\t--[ConsistencyEnforcer::hasCurrentChunkLoaded] ' + err, sql);
resolve(true);
} else {
resolve(!!result.rows[0].exists);
}
});
}
});
});
}
/**
* Get consistency state.
*
* @param {Conversion} self
* @param {Object} chunk
*
* @returns {Promise}
*/
const getConsistencyState = (self, chunk) => {
return new Promise(resolve => {
getIsStarted(self, chunk._id).then(isStarted => {
if (isStarted) {
hasCurrentChunkLoaded(self, chunk).then(result => resolve(result));
} else {
// Normal migration flow.
resolve(false);
}
});
});
}
/**
* Enforce consistency before processing a chunk of data.
* Ensure there are no data duplications.
* In case of normal execution - it is a good practice.
* In case of rerunning Nmig after unexpected failure - it is absolutely mandatory.
*
* @param {Conversion} self
* @param {Object} chunk
*
* @returns {Promise}
*/
module.exports.enforceConsistency = (self, chunk) => {
return new Promise(resolve => {
getConsistencyState(self, chunk).then(hasAlreadyBeenLoaded => {
if (hasAlreadyBeenLoaded) {
/*
* Current data chunk runs after a disaster recovery.
* It has already been loaded.
*/
resolve(false);
} else {
// Normal migration flow.
updateConsistencyState(self, chunk._id).then(() => resolve(true));
}
})
});
};
/**
* Drop the {self._schema + '_' + originalTableName + '_data_chunk_id_temp'} column from current table.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports.dropDataChunkIdColumn = (self, tableName) => {
return new Promise(resolve => {
self._pg.connect((pgError, client, done) => {
if (pgError) {
generateError(self, '\t--[ConsistencyEnforcer::dropDataChunkIdColumn] Cannot connect to PostgreSQL server...\n' + pgError);
resolve();
} else {
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
const columnToDrop = self._schema + '_' + originalTableName + '_data_chunk_id_temp';
const sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" DROP COLUMN "' + columnToDrop + '";';
client.query(sql, (err, result) => {
done();
if (err) {
const errMsg = '\t--[ConsistencyEnforcer::dropDataChunkIdColumn] Failed to drop column "' + columnToDrop + '"\n'
+ '\t--[ConsistencyEnforcer::dropDataChunkIdColumn] '+ err;
generateError(self, errMsg, sql);
}
resolve();
});
}
});
});
};

src/ConsistencyEnforcer.ts Normal file

@@ -0,0 +1,109 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import DBAccessQueryResult from './DBAccessQueryResult';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Updates consistency state.
*/
async function updateConsistencyState(conversion: Conversion, dataPoolId: number): Promise<void> {
const logTitle: string = 'ConsistencyEnforcer::updateConsistencyState';
const sql: string = `UPDATE "${ conversion._schema }"."data_pool_${ conversion._schema }${ conversion._mySqlDbName }"
SET is_started = TRUE WHERE id = ${ dataPoolId };`;
const dbAccess: DBAccess = new DBAccess(conversion);
await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
}
/**
* Retrieves the `is_started` value of current chunk.
*/
async function getIsStarted(conversion: Conversion, dataPoolId: number): Promise<boolean> {
const logTitle: string = 'ConsistencyEnforcer::getIsStarted';
const sql: string = `SELECT is_started AS is_started
FROM "${ conversion._schema }"."data_pool_${ conversion._schema }${ conversion._mySqlDbName }"
WHERE id = ${ dataPoolId };`;
const dbAccess: DBAccess = new DBAccess(conversion);
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
return result.error ? false : !!result.data.rows[0].is_started;
}
/**
* If the current data chunk runs after a disaster recovery,
* it must be determined whether this chunk has already been loaded,
* in order to prevent possible data duplication.
*/
async function hasCurrentChunkLoaded(conversion: Conversion, chunk: any): Promise<boolean> {
const logTitle: string = 'ConsistencyEnforcer::hasCurrentChunkLoaded';
const originalTableName: string = extraConfigProcessor.getTableName(conversion, chunk._tableName, true);
const sql: string = `SELECT EXISTS(SELECT 1 FROM "${ conversion._schema }"."${ chunk._tableName }"
WHERE "${ conversion._schema }_${ originalTableName }_data_chunk_id_temp" = ${ chunk._id });`;
const dbAccess: DBAccess = new DBAccess(conversion);
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
return result.error ? true : !!result.data.rows[0].exists;
}
/**
* Determines consistency state.
*/
async function getConsistencyState(conversion: Conversion, chunk: any): Promise<boolean> {
const isStarted: boolean = await getIsStarted(conversion, chunk._id);
// "isStarted" is false in normal migration flow.
return isStarted ? hasCurrentChunkLoaded(conversion, chunk) : false;
}
/**
* Enforces consistency before processing a chunk of data.
* Ensures there are no data duplications.
* In case of normal execution - it is a good practice.
* In case of rerunning Nmig after unexpected failure - it is absolutely mandatory.
*/
export async function enforceConsistency(conversion: Conversion, chunk: any): Promise<boolean> {
const hasAlreadyBeenLoaded: boolean = await getConsistencyState(conversion, chunk);
if (hasAlreadyBeenLoaded) {
// Current data chunk runs after a disaster recovery.
// It has already been loaded.
return false;
}
// Normal migration flow.
await updateConsistencyState(conversion, chunk._id);
return true;
}
/**
* Drops the {conversion._schema + '_' + originalTableName + '_data_chunk_id_temp'} column from current table.
*/
export async function dropDataChunkIdColumn(conversion: Conversion, tableName: string): Promise<void> {
const logTitle: string = 'ConsistencyEnforcer::dropDataChunkIdColumn';
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const columnToDrop: string = `${ conversion._schema }_${ originalTableName }_data_chunk_id_temp`;
const sql: string = `ALTER TABLE "${ conversion._schema }"."${ tableName }" DROP COLUMN "${ columnToDrop }";`;
const dbAccess: DBAccess = new DBAccess(conversion);
await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
}
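A short sketch of the intended call pattern for enforceConsistency (illustrative only; the real call site is in src/DataLoader.ts further below):

import Conversion from './Conversion';
import { enforceConsistency, dropDataChunkIdColumn } from './ConsistencyEnforcer';

async function processChunk(conversion: Conversion, chunk: any): Promise<void> {
    // true means normal flow; false means the chunk was already loaded before a crash.
    const isNormalFlow: boolean = await enforceConsistency(conversion, chunk);
    if (!isNormalFlow) {
        return; // Skip the chunk to avoid data duplication.
    }
    // ...load the chunk via COPY here; at the very end of the migration:
    // await dropDataChunkIdColumn(conversion, chunk._tableName);
}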


@@ -1,134 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const sequencesProcessor = require('./SequencesProcessor');
const dataPoolManager = require('./DataPoolManager');
const runVacuumFullAndAnalyze = require('./VacuumProcessor');
const migrationStateManager = require('./MigrationStateManager');
const generateReport = require('./ReportGenerator');
const processEnum = require('./EnumProcessor');
const processNull = require('./NullProcessor');
const processDefault = require('./DefaultProcessor');
const processIndexAndKey = require('./IndexAndKeyProcessor');
const processComments = require('./CommentsProcessor');
const processForeignKey = require('./ForeignKeyProcessor');
const processViews = require('./ViewGenerator');
const { dropDataChunkIdColumn } = require('./ConsistencyEnforcer');
/**
* Continues migration process after data loading, when migrate_only_data is true.
*
* @param {Conversion} self
*
* @returns {undefined}
*/
const continueProcessAfterDataLoadingShort = self => {
const promises = [];
for (let i = 0; i < self._tablesToMigrate.length; ++i) {
const tableName = self._tablesToMigrate[i];
promises.push(
dropDataChunkIdColumn(self, tableName).then(() => {
return sequencesProcessor.setSequenceValue(self, tableName);
})
);
}
Promise.all(promises).then(() => {
return dataPoolManager.dropDataPoolTable(self);
}).then(() => {
return runVacuumFullAndAnalyze(self);
}).then(() => {
return migrationStateManager.dropStateLogsTable(self);
}).then(
() => generateReport(self, 'NMIG migration is accomplished.')
);
}
/**
* Continues migration process after data loading, when migrate_only_data is false.
*
* @param {Conversion} self
*
* @returns {undefined}
*/
const continueProcessAfterDataLoadingLong = self => {
migrationStateManager.get(self, 'per_table_constraints_loaded').then(isTableConstraintsLoaded => {
const promises = [];
if (!isTableConstraintsLoaded) {
for (let i = 0; i < self._tablesToMigrate.length; ++i) {
const tableName = self._tablesToMigrate[i];
promises.push(
dropDataChunkIdColumn(self, tableName).then(() => {
return processEnum(self, tableName);
}).then(() => {
return processNull(self, tableName);
}).then(() => {
return processDefault(self, tableName);
}).then(() => {
return sequencesProcessor.createSequence(self, tableName);
}).then(() => {
return processIndexAndKey(self, tableName);
}).then(() => {
return processComments(self, tableName);
})
);
}
}
Promise.all(promises).then(() => {
migrationStateManager.set(self, 'per_table_constraints_loaded').then(() => {
return processForeignKey(self);
}).then(() => {
return migrationStateManager.set(self, 'foreign_keys_loaded');
}).then(() => {
return dataPoolManager.dropDataPoolTable(self);
}).then(() => {
return processViews(self);
}).then(() => {
return migrationStateManager.set(self, 'views_loaded');
}).then(() => {
return runVacuumFullAndAnalyze(self);
}).then(() => {
return migrationStateManager.dropStateLogsTable(self);
}).then(
() => generateReport(self, 'NMIG migration is accomplished.')
);
});
});
}
/**
* Continues migration process after data loading.
*
* @param {Conversion} self
*
* @returns {undefined}
*/
module.exports = self => {
if (self._migrateOnlyData) {
continueProcessAfterDataLoadingShort(self);
} else {
continueProcessAfterDataLoadingLong(self);
}
};


@@ -0,0 +1,91 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as sequencesProcessor from './SequencesProcessor';
import * as dataPoolManager from './DataPoolManager';
import runVacuumFullAndAnalyze from './VacuumProcessor';
import * as migrationStateManager from './MigrationStateManager';
import generateReport from './ReportGenerator';
import processEnum from './EnumProcessor';
import processNull from './NullProcessor';
import processDefault from './DefaultProcessor';
import processIndexAndKey from './IndexAndKeyProcessor';
import processComments from './CommentsProcessor';
import processForeignKey from './ForeignKeyProcessor';
import processViews from './ViewGenerator';
import { dropDataChunkIdColumn } from './ConsistencyEnforcer';
import Conversion from './Conversion';
/**
* Continues migration process after data loading, when migrate_only_data is true.
*/
async function continueProcessAfterDataLoadingShort(conversion: Conversion): Promise<void> {
const promises: Promise<void>[] = conversion._tablesToMigrate.map(async (tableName: string) => {
await dropDataChunkIdColumn(conversion, tableName);
return sequencesProcessor.setSequenceValue(conversion, tableName);
});
await Promise.all(promises);
await dataPoolManager.dropDataPoolTable(conversion);
await runVacuumFullAndAnalyze(conversion);
await migrationStateManager.dropStateLogsTable(conversion);
generateReport(conversion, 'NMIG migration is accomplished.');
}
/**
* Continues migration process after data loading, when migrate_only_data is false.
*/
async function continueProcessAfterDataLoadingLong(conversion: Conversion): Promise<void> {
const isTableConstraintsLoaded: boolean = await migrationStateManager.get(conversion, 'per_table_constraints_loaded');
const promises: Promise<void>[] = conversion._tablesToMigrate.map(async (tableName: string) => {
if (!isTableConstraintsLoaded) {
await dropDataChunkIdColumn(conversion, tableName);
await processEnum(conversion, tableName);
await processNull(conversion, tableName);
await processDefault(conversion, tableName);
await sequencesProcessor.createSequence(conversion, tableName);
await processIndexAndKey(conversion, tableName);
await processComments(conversion, tableName);
}
});
await Promise.all(promises);
await migrationStateManager.set(conversion, 'per_table_constraints_loaded');
await processForeignKey(conversion);
await migrationStateManager.set(conversion, 'foreign_keys_loaded');
await dataPoolManager.dropDataPoolTable(conversion);
await processViews(conversion);
await migrationStateManager.set(conversion, 'views_loaded');
await runVacuumFullAndAnalyze(conversion);
await migrationStateManager.dropStateLogsTable(conversion);
generateReport(conversion, 'NMIG migration is accomplished.');
}
/**
* Continues migration process after data loading.
*/
export default async function(conversion: Conversion): Promise<void> {
if (conversion._migrateOnlyData) {
await continueProcessAfterDataLoadingShort(conversion);
return;
}
await continueProcessAfterDataLoadingLong(conversion);
}

src/Conversion.ts Normal file

@@ -0,0 +1,258 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as path from 'path';
import { EventEmitter } from 'events';
import { Pool as MySQLPool } from 'mysql';
import { Pool as PgPool } from 'pg';
export default class Conversion {
/**
* Parsed Nmig's configuration object.
*/
public readonly _config: any;
/**
* An object, representing source (MySQL) db connection details.
*/
public readonly _sourceConString: any;
/**
* An object, representing target (PostgreSQL) db connection details.
*/
public readonly _targetConString: any;
/**
* During migration each table's data will be split into chunks not larger than data_chunk_size (in MB).
*/
public _dataChunkSize: number;
/**
* V8 memory limit of the loader process.
*/
public _loaderMaxOldSpaceSize: number | string;
/**
* Maximal amount of simultaneous connections to your MySQL and PostgreSQL servers.
*/
public readonly _maxDbConnectionPoolSize: number;
/**
* JavaScript encoding type.
*/
public readonly _encoding: string;
/**
* The path to the "all.log" file.
*/
public readonly _allLogsPath: string;
/**
* Default file permissions.
*/
public readonly _0777: string;
/**
* Specifies the character, that separates columns within each record.
*/
public readonly _delimiter: string;
/**
* Defines if only the data should be migrated (into a preset schema).
*/
public readonly _migrateOnlyData: boolean;
/**
* A path to the "logs_directory".
*/
public readonly _logsDirPath: string;
/**
* A path to the data types map.
*/
public readonly _dataTypesMapAddr: string;
/**
* A path to the "errors-only.log" file.
*/
public readonly _errorLogsPath: string;
/**
* A path to the "not_created_views" folder.
*/
public readonly _notCreatedViewsPath: string;
/**
* A list of tables, to which PostgreSQL's VACUUM will not be applied at the end of migration.
*/
public readonly _noVacuum: string[];
/**
* A list of tables, that will not be migrated.
*/
public readonly _excludeTables: string[];
/**
* The timestamp, at which the migration began.
*/
public readonly _timeBegin: Date;
/**
* Current version of source (MySQL) db.
*/
public _mysqlVersion: string | number;
/**
* Node-MySQL connections pool.
*/
public _mysql?: MySQLPool;
/**
* Node-Postgres connection pool.
*/
public _pg?: PgPool;
/**
* An object, representing additional configuration options.
*/
public readonly _extraConfig: any;
/**
* A list of tables, that should be migrated.
*/
public readonly _tablesToMigrate: string[];
/**
* A list of views, that should be migrated.
*/
public readonly _viewsToMigrate: string[];
/**
* A name of the schema, that will contain all migrated tables.
*/
public readonly _schema: string;
/**
* A name of source (MySQL) db, that should be migrated.
*/
public readonly _mySqlDbName: string;
/**
* A number of already processed data chunks.
*/
public _processedChunks: number;
/**
* A dictionary of table names, and corresponding metadata.
*/
public readonly _dicTables: any;
/**
* An array of data chunks.
*/
public readonly _dataPool: any[];
/**
* A flag, that indicates if Nmig currently runs in test mode.
*/
public _runsInTestMode: boolean;
/**
* A flag, that indicates if test resources created by Nmig should be removed.
*/
public readonly _removeTestResources: boolean;
/**
* "migrationCompleted" event.
*/
public readonly _migrationCompletedEvent: string;
/**
* An EventEmitter instance.
*/
public _eventEmitter: EventEmitter | null;
/**
* The data types map.
*/
public _dataTypesMap: any;
/**
* Constructor.
*/
public constructor(config: any) {
this._config = config;
this._sourceConString = this._config.source;
this._targetConString = this._config.target;
this._logsDirPath = this._config.logsDirPath;
this._dataTypesMapAddr = this._config.dataTypesMapAddr;
this._allLogsPath = path.join(this._logsDirPath, 'all.log');
this._errorLogsPath = path.join(this._logsDirPath, 'errors-only.log');
this._notCreatedViewsPath = path.join(this._logsDirPath, 'not_created_views');
this._noVacuum = this._config.no_vacuum === undefined ? [] : this._config.no_vacuum;
this._excludeTables = this._config.exclude_tables === undefined ? [] : this._config.exclude_tables;
this._timeBegin = new Date();
this._encoding = this._config.encoding === undefined ? 'utf8' : this._config.encoding;
this._dataChunkSize = this._config.data_chunk_size === undefined ? 1 : +this._config.data_chunk_size;
this._dataChunkSize = this._dataChunkSize <= 0 ? 1 : this._dataChunkSize;
this._0777 = '0777';
this._mysqlVersion = '5.6.21'; // Simply a default value.
this._extraConfig = this._config.extraConfig === undefined ? false : this._config.extraConfig;
this._tablesToMigrate = [];
this._viewsToMigrate = [];
this._processedChunks = 0;
this._dataPool = [];
this._dicTables = Object.create(null);
this._mySqlDbName = this._sourceConString.database;
this._schema = this._config.schema === undefined || this._config.schema === ''
? this._mySqlDbName
: this._config.schema;
this._maxDbConnectionPoolSize = this._config.max_db_connection_pool_size !== undefined && Conversion._isIntNumeric(this._config.max_db_connection_pool_size)
? +this._config.max_db_connection_pool_size
: 10;
this._runsInTestMode = false;
this._eventEmitter = null;
this._migrationCompletedEvent = 'migrationCompleted';
this._removeTestResources = this._config.remove_test_resources === undefined ? true : this._config.remove_test_resources;
this._maxDbConnectionPoolSize = this._maxDbConnectionPoolSize > 0 ? this._maxDbConnectionPoolSize : 10;
this._loaderMaxOldSpaceSize = this._config.loader_max_old_space_size;
this._loaderMaxOldSpaceSize = Conversion._isIntNumeric(this._loaderMaxOldSpaceSize) ? this._loaderMaxOldSpaceSize : 'DEFAULT';
this._migrateOnlyData = this._config.migrate_only_data === undefined ? false : this._config.migrate_only_data;
this._delimiter = this._config.delimiter !== undefined && this._config.delimiter.length === 1
? this._config.delimiter
: ',';
}
/**
* Checks if given value is integer number.
*/
private static _isIntNumeric(value: any): boolean {
return !isNaN(parseInt(value)) && isFinite(value);
}
/**
* Initializes Conversion instance.
*/
public static initializeConversion(config: any): Promise<Conversion> {
return Promise.resolve(new Conversion(config));
}
}
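A minimal sketch of the configuration object the constructor above expects; the connection details are illustrative, and only keys actually read by the constructor are shown:

import Conversion from './Conversion';

const config: any = {
    source: { host: '127.0.0.1', user: 'root', password: 'secret', database: 'my_mysql_db' },
    target: { host: '127.0.0.1', user: 'postgres', password: 'secret', database: 'my_pg_db' },
    logsDirPath: '/path/to/nmig/logs_directory',
    dataTypesMapAddr: '/path/to/nmig/config/data_types_map.json',
    schema: 'public',               // Falls back to the MySQL db name when omitted.
    encoding: 'utf8',
    data_chunk_size: 1,             // In MB; non-positive values fall back to 1.
    max_db_connection_pool_size: 10,
    migrate_only_data: false,
    delimiter: ','                  // Must be exactly one character.
};

Conversion.initializeConversion(config)
    .then((conversion: Conversion) => { /* Hand over to the migration pipeline. */ });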

src/DBAccess.ts Normal file

@@ -0,0 +1,216 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as mysql from 'mysql';
import { MysqlError, Pool as MySQLPool, PoolConnection } from 'mysql';
import { Pool as PgPool, PoolClient, QueryResult } from 'pg';
import { generateError } from './FsOps';
import Conversion from './Conversion';
import generateReport from './ReportGenerator';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
export default class DBAccess {
/**
* Conversion instance.
*/
private readonly _conversion: Conversion;
/**
* DBAccess constructor.
*/
public constructor(conversion: Conversion) {
this._conversion = conversion;
}
/**
* Ensures MySQL connection pool existence.
*/
private async _getMysqlConnection(): Promise<void> {
if (!this._conversion._mysql) {
this._conversion._sourceConString.connectionLimit = this._conversion._maxDbConnectionPoolSize;
this._conversion._sourceConString.multipleStatements = true;
const pool: MySQLPool = mysql.createPool(this._conversion._sourceConString);
if (!pool) {
await generateError(this._conversion, '\t--[getMysqlConnection] Cannot connect to MySQL server...');
process.exit();
}
this._conversion._mysql = pool;
}
}
/**
* Ensures PostgreSQL connection pool existence.
*/
private async _getPgConnection(): Promise<void> {
if (!this._conversion._pg) {
this._conversion._targetConString.max = this._conversion._maxDbConnectionPoolSize;
const pool: PgPool = new PgPool(this._conversion._targetConString);
if (!pool) {
await generateError(this._conversion, '\t--[getPgConnection] Cannot connect to PostgreSQL server...');
process.exit();
}
this._conversion._pg = pool;
this._conversion._pg.on('error', async (error: Error) => {
const message: string = `Cannot connect to PostgreSQL server...\n${ error.message }\n${ error.stack }`;
await generateError(this._conversion, message);
generateReport(this._conversion, message);
});
}
}
/**
* Obtains PoolConnection instance.
*/
public getMysqlClient(): Promise<PoolConnection> {
return new Promise<PoolConnection>(async (resolve, reject) => {
await this._getMysqlConnection();
(<MySQLPool>this._conversion._mysql).getConnection((err: MysqlError | null, connection: PoolConnection) => {
return err ? reject(err) : resolve(connection);
});
});
}
/**
* Obtains PoolClient instance.
*/
public async getPgClient(): Promise<PoolClient> {
await this._getPgConnection();
return (<PgPool>this._conversion._pg).connect();
}
/**
* Runs a query on the first available idle client and returns its result.
* Note, the pool does the acquiring and releasing of the client internally.
*/
public async runPgPoolQuery(sql: string): Promise<QueryResult> {
await this._getPgConnection();
return (<PgPool>this._conversion._pg).query(sql);
}
/**
* Releases MySQL or PostgreSQL connection back to appropriate pool.
*/
public async releaseDbClient(dbClient?: PoolConnection | PoolClient): Promise<void> {
try {
(<PoolConnection | PoolClient>dbClient).release();
dbClient = undefined;
} catch (error) {
await generateError(this._conversion, `\t--[DBAccess::releaseDbClient] ${ error }`);
}
}
/**
* Checks if there are no more queries to be sent using the current client.
* In such a case, the client should be released.
*/
private async _releaseDbClientIfNecessary(client: PoolConnection | PoolClient, shouldHoldClient: boolean): Promise<void> {
if (!shouldHoldClient) {
await this.releaseDbClient(client);
}
}
/**
* Sends given SQL query to specified DB.
* Performs appropriate actions (requesting/releasing client) against target connections pool.
*/
public async query(
caller: string,
sql: string,
vendor: DBVendors,
processExitOnError: boolean,
shouldReturnClient: boolean,
client?: PoolConnection | PoolClient,
bindings?: any[]
): Promise<DBAccessQueryResult> {
// Checks if there is an available client.
if (!client) {
try {
// Client is undefined.
// It must be requested from the connections pool.
client = vendor === DBVendors.PG ? await this.getPgClient() : await this.getMysqlClient();
} catch (error) {
// An error occurred when tried to obtain a client from one of pools.
await generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
return processExitOnError ? process.exit() : { client: client, data: undefined, error: error };
}
}
return vendor === DBVendors.PG
? this._queryPG(caller, sql, processExitOnError, shouldReturnClient, (<PoolClient>client), bindings)
: this._queryMySQL(caller, sql, processExitOnError, shouldReturnClient, (<PoolConnection>client), bindings);
}
/**
* Sends given SQL query to MySQL.
*/
private _queryMySQL(
caller: string,
sql: string,
processExitOnError: boolean,
shouldReturnClient: boolean,
client?: PoolConnection,
bindings?: any[]
): Promise<DBAccessQueryResult> {
return new Promise<DBAccessQueryResult>((resolve, reject) => {
if (Array.isArray(bindings)) {
sql = (<PoolConnection>client).format(sql, bindings);
}
(<PoolConnection>client).query(sql, async (error: MysqlError | null, data: any) => {
await this._releaseDbClientIfNecessary((<PoolConnection>client), shouldReturnClient);
if (error) {
await generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
return processExitOnError ? process.exit() : reject({ client: client, data: undefined, error: error });
}
return resolve({ client: client, data: data, error: undefined });
});
});
}
/**
* Sends given SQL query to PostgreSQL.
*/
private async _queryPG(
caller: string,
sql: string,
processExitOnError: boolean,
shouldReturnClient: boolean,
client?: PoolClient,
bindings?: any[]
): Promise<DBAccessQueryResult> {
try {
const data: any = Array.isArray(bindings) ? await (<PoolClient>client).query(sql, bindings) : await (<PoolClient>client).query(sql);
await this._releaseDbClientIfNecessary((<PoolClient>client), shouldReturnClient); // Sets the client undefined.
return { client: client, data: data, error: undefined };
} catch (error) {
await this._releaseDbClientIfNecessary((<PoolClient>client), shouldReturnClient); // Sets the client undefined.
await generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
return processExitOnError ? process.exit() : { client: client, data: undefined, error: error };
}
}
}
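The shouldReturnClient flag makes it possible to run several statements over one connection; a sketch of that pattern (it mirrors the size/count queries in src/DataChunksProcessor.ts further below, and assumes a ready Conversion instance):

import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';

async function twoQueriesOneClient(conversion: Conversion): Promise<void> {
    const dbAccess: DBAccess = new DBAccess(conversion);
    // Hold the MySQL client after the first query...
    const first: DBAccessQueryResult = await dbAccess.query('Sample::caller', 'SELECT 1;', DBVendors.MYSQL, true, true);
    // ...reuse it for the second one, and let DBAccess release it afterwards.
    await dbAccess.query('Sample::caller', 'SELECT 2;', DBVendors.MYSQL, true, false, first.client);
}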


@@ -18,29 +18,25 @@
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';

const fs = require('fs');

/**
 * Reads "./config/data_types_map.json" and converts its json content to js object.
 * Appends this object to "FromMySQL2PostgreSQL" instance.
 *
 * @param {Conversion} self
 *
 * @returns {Promise}
 */
module.exports = self => {
    return new Promise(resolve => {
        fs.readFile(self._dataTypesMapAddr, (error, data) => {
            if (error) {
                console.log('\t--[readDataTypesMap] Cannot read "DataTypesMap" from ' + self._dataTypesMapAddr);
                process.exit();
            }

            self._dataTypesMap = JSON.parse(data);
            console.log('\t--[readDataTypesMap] Data Types Map is loaded...');
            resolve(self);
        });
    });
};

import { PoolClient } from 'pg';
import { PoolConnection } from 'mysql';

export default interface DBAccessQueryResult {
    /**
     * MySQL's or PostgreSQL's client instance.
     * The client may be undefined.
     */
    client?: PoolConnection | PoolClient;

    /**
     * Query result.
     * The data may be undefined.
     */
    data?: any;

    /**
     * Query error.
     * The error may be undefined.
     */
    error?: any;
}
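Because query errors are returned rather than thrown (unless processExitOnError is set), callers branch on the error field; a small illustrative check, reusing the DBAccess names from the previous sketch:

async function logRows(dbAccess: DBAccess, sql: string): Promise<void> {
    const result: DBAccessQueryResult = await dbAccess.query('Sample::caller', sql, DBVendors.PG, false, false);
    if (result.error) {
        return; // result.data is undefined here; the error has already been logged by DBAccess.
    }
    console.log(result.data.rows);
}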


@@ -18,18 +18,9 @@
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';

module.exports = class Table {
    /**
     * This class represents table related metadata.
     * Constructor.
     *
     * @param {String} tableLogPath
     */
    constructor(tableLogPath) {
        this.tableLogPath = tableLogPath;
        this.arrTableColumns = [];
        this.totalRowsInserted = 0;
    }
};

enum DBVendors {
    MYSQL,
    PG,
}

export default DBVendors;


@@ -1,148 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const arrangeColumnsData = require('./ColumnsDataArranger');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Prepares an array of tables and chunk offsets.
*
* @param {Conversion} self
* @param {String} tableName
* @param {Boolean} haveDataChunksProcessed
*
* @returns {Promise}
*/
module.exports = (self, tableName, haveDataChunksProcessed) => {
return connect(self).then(() => {
return new Promise(resolve => {
if (haveDataChunksProcessed) {
return resolve();
}
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[prepareDataChunks] Cannot connect to MySQL server...\n\t' + error);
resolve();
} else {
// Determine current table size, apply "chunking".
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
let sql = "SELECT (data_length / 1024 / 1024) AS size_in_mb "
+ "FROM information_schema.tables "
+ "WHERE table_schema = '" + self._mySqlDbName + "' "
+ "AND table_name = '" + originalTableName + "';";
connection.query(sql, (err, rows) => {
if (err) {
connection.release();
generateError(self, '\t--[prepareDataChunks] ' + err, sql);
resolve();
} else {
const tableSizeInMb = +rows[0].size_in_mb;
rows = null;
sql = 'SELECT COUNT(1) AS rows_count FROM `' + originalTableName + '`;';
const strSelectFieldList = arrangeColumnsData(
self._dicTables[tableName].arrTableColumns,
self._mysqlVersion
);
connection.query(sql, (err2, rows2) => {
connection.release();
if (err2) {
generateError(self, '\t--[prepareDataChunks] ' + err2, sql);
resolve();
} else {
const rowsCnt = rows2[0].rows_count;
rows2 = null;
let chunksCnt = tableSizeInMb / self._dataChunkSize;
chunksCnt = chunksCnt < 1 ? 1 : chunksCnt;
const rowsInChunk = Math.ceil(rowsCnt / chunksCnt);
const arrDataPoolPromises = [];
const msg = '\t--[prepareDataChunks] Total rows to insert into '
+ '"' + self._schema + '"."' + tableName + '": ' + rowsCnt;
log(self, msg, self._dicTables[tableName].tableLogPath);
for (let offset = 0; offset < rowsCnt; offset += rowsInChunk) {
arrDataPoolPromises.push(new Promise(resolveDataUnit => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[prepareDataChunks] Cannot connect to PostgreSQL server...\n' + error);
resolveDataUnit();
} else {
const strJson = '{"_tableName":"' + tableName
+ '","_selectFieldList":"' + strSelectFieldList + '",'
+ '"_offset":' + offset + ','
+ '"_rowsInChunk":' + rowsInChunk + ','
+ '"_rowsCnt":' + rowsCnt + '}';
/*
* Define current data chunk size in MB.
* If there is only one chunk, then its size is equal to the table size.
* If there are more than one chunk,
* then a size of each chunk besides the last one is equal to "data_chunk_size",
* and a size of the last chunk is either "data_chunk_size" or tableSizeInMb % chunksCnt.
*/
let currentChunkSizeInMb = 0;
if (chunksCnt === 1) {
currentChunkSizeInMb = tableSizeInMb;
} else if (offset + rowsInChunk >= rowsCnt) {
currentChunkSizeInMb = tableSizeInMb % chunksCnt;
currentChunkSizeInMb = currentChunkSizeInMb || self._dataChunkSize;
} else {
currentChunkSizeInMb = self._dataChunkSize;
}
sql = 'INSERT INTO "' + self._schema + '"."data_pool_' + self._schema
+ self._mySqlDbName + '"("is_started", "json", "size_in_mb")'
+ ' VALUES(FALSE, $1, $2);';
client.query(sql, [strJson, currentChunkSizeInMb], err => {
done();
if (err) {
generateError(self, '\t--[prepareDataChunks] INSERT failed...\n' + err, sql);
}
resolveDataUnit();
});
}
});
}));
}
Promise.all(arrDataPoolPromises).then(() => resolve());
}
});
}
});
}
});
});
});
};

src/DataChunksProcessor.ts Normal file

@@ -0,0 +1,105 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import arrangeColumnsData from './ColumnsDataArranger';
import * as extraConfigProcessor from './ExtraConfigProcessor';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
/**
* Prepares an array of tables and chunk offsets.
*/
export default async (conversion: Conversion, tableName: string, haveDataChunksProcessed: boolean): Promise<void> => {
if (haveDataChunksProcessed) {
return;
}
// Determine current table size, apply "chunking".
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
let sql: string = `SELECT (data_length / 1024 / 1024) AS size_in_mb FROM information_schema.tables
WHERE table_schema = '${ conversion._mySqlDbName }' AND table_name = '${ originalTableName }';`;
const logTitle: string = 'DataChunksProcessor::default';
const dbAccess: DBAccess = new DBAccess(conversion);
const sizeQueryResult: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.MYSQL, true, true);
const tableSizeInMb: number = +sizeQueryResult.data[0].size_in_mb;
const strSelectFieldList: string = arrangeColumnsData(conversion._dicTables[tableName].arrTableColumns, conversion._mysqlVersion);
sql = `SELECT COUNT(1) AS rows_count FROM \`${ originalTableName }\`;`;
const countResult: DBAccessQueryResult = await dbAccess.query(
logTitle,
sql,
DBVendors.MYSQL,
true,
false,
sizeQueryResult.client
);
const rowsCnt: number = countResult.data[0].rows_count;
let chunksCnt: number = tableSizeInMb / conversion._dataChunkSize;
chunksCnt = chunksCnt < 1 ? 1 : chunksCnt;
const rowsInChunk: number = Math.ceil(rowsCnt / chunksCnt);
const arrDataPoolPromises: Promise<void>[] = [];
const msg: string = `\t--[prepareDataChunks] Total rows to insert into "${ conversion._schema }"."${ tableName }": ${ rowsCnt }`;
log(conversion, msg, conversion._dicTables[tableName].tableLogPath);
for (let offset: number = 0; offset < rowsCnt; offset += rowsInChunk) {
arrDataPoolPromises.push(new Promise<void>(async resolveDataUnit => {
const strJson: string = `{"_tableName":"${ tableName }","_selectFieldList":"${ strSelectFieldList }",
"_offset":${ offset },"_rowsInChunk":${ rowsInChunk },"_rowsCnt":${ rowsCnt }}`;
// Define current data chunk size in MB.
// If there is only one chunk, then its size is equal to the table size.
// If there are more than one chunk,
// then a size of each chunk besides the last one is equal to "data_chunk_size",
// and a size of the last chunk is either "data_chunk_size" or tableSizeInMb % chunksCnt.
let currentChunkSizeInMb: number = 0;
if (chunksCnt === 1) {
currentChunkSizeInMb = tableSizeInMb;
} else if (offset + rowsInChunk >= rowsCnt) {
currentChunkSizeInMb = tableSizeInMb % chunksCnt;
currentChunkSizeInMb = currentChunkSizeInMb || conversion._dataChunkSize;
} else {
currentChunkSizeInMb = conversion._dataChunkSize;
}
sql = `INSERT INTO "${ conversion._schema }"."data_pool_${ conversion._schema }${ conversion._mySqlDbName }"
("is_started", "json", "size_in_mb") VALUES (FALSE, $1, $2);`;
await dbAccess.query(
logTitle,
sql,
DBVendors.PG,
false,
false,
undefined,
[strJson, currentChunkSizeInMb]
);
resolveDataUnit();
}));
}
await Promise.all(arrDataPoolPromises);
}
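A worked example of the chunking arithmetic above (the numbers are illustrative):

// tableSizeInMb = 10, rowsCnt = 100000, conversion._dataChunkSize = 4
// chunksCnt   = 10 / 4 = 2.5
// rowsInChunk = Math.ceil(100000 / 2.5) = 40000
// offsets 0, 40000, 80000  ->  three data-pool records
// "size_in_mb": 4, 4 and 4 (10 % 2.5 === 0, so the "|| data_chunk_size" fallback applies to the last chunk)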


@@ -1,253 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const path = require('path');
const { from } = require('pg-copy-streams');
const csvStringify = require('./CsvStringifyModified');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const connect = require('./Connector');
const Conversion = require('./Classes/Conversion');
const MessageToMaster = require('./Classes/MessageToMaster');
const { enforceConsistency } = require('./ConsistencyEnforcer');
const extraConfigProcessor = require('./ExtraConfigProcessor');
const BufferStream = require('./Classes/BufferStream');
process.on('message', signal => {
const self = new Conversion(signal.config);
const promises = [];
log(self, '\t--[loadData] Loading the data...');
for (let i = 0; i < signal.chunks.length; ++i) {
promises.push(
connect(self).then(() => {
return enforceConsistency(self, signal.chunks[i]);
}).then(isNormalFlow => {
if (isNormalFlow) {
return populateTableWorker(
self,
signal.chunks[i]._tableName,
signal.chunks[i]._selectFieldList,
signal.chunks[i]._offset,
signal.chunks[i]._rowsInChunk,
signal.chunks[i]._rowsCnt,
signal.chunks[i]._id
);
}
return deleteChunk(self, signal.chunks[i]._id);
})
);
}
Promise.all(promises).then(() => process.send('processed'));
});
/**
* Delete given record from the data-pool.
*
* @param {Conversion} self
* @param {Number} dataPoolId
* @param {Node-pg client|undefined} client
* @param {Function|undefined} done
*
* @returns {Promise}
*/
const deleteChunk = (self, dataPoolId, client, done) => {
return new Promise(resolve => {
if (client) {
const sql = 'DELETE FROM "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName
+ '" ' + 'WHERE id = ' + dataPoolId + ';';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[deleteChunk] ' + err, sql);
}
resolve();
});
} else {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[deleteChunk] Cannot connect to PostgreSQL server...\n' + error);
resolve();
} else {
const sql = 'DELETE FROM "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName
+ '" ' + 'WHERE id = ' + dataPoolId + ';';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[deleteChunk] ' + err, sql);
}
resolve();
});
}
});
}
});
};
/**
* Build a MySQL query to retrieve the chunk of data.
*
* @param {String} tableName
* @param {String} strSelectFieldList
* @param {Number} offset
* @param {Number} rowsInChunk
*
* @returns {String}
*/
const buildChunkQuery = (tableName, strSelectFieldList, offset, rowsInChunk) => {
return 'SELECT ' + strSelectFieldList + ' FROM `' + tableName + '` LIMIT ' + offset + ',' + rowsInChunk + ';';
};
/**
* Process data-loading error.
*
* @param {Conversion} self
* @param {String} streamError
* @param {String} sql
* @param {String} sqlCopy
* @param {String} tableName
* @param {Number} dataPoolId
* @param {Node-pg client|undefined} client
* @param {Function|undefined} done
* @param {Function} callback
*
* @returns {undefined}
*/
const processDataError = (self, streamError, sql, sqlCopy, tableName, dataPoolId, client, done, callback) => {
generateError(self, '\t--[populateTableWorker] ' + streamError, sqlCopy);
const rejectedData = '\t--[populateTableWorker] Error loading table data:\n' + sql + '\n';
log(self, rejectedData, path.join(self._logsDirPath, tableName + '.log'));
deleteChunk(self, dataPoolId, client, done).then(() => callback());
};
/**
* Load a chunk of data using "PostgreSQL COPY".
*
* @param {Conversion} self
* @param {String} tableName
* @param {String} strSelectFieldList
* @param {Number} offset
* @param {Number} rowsInChunk
* @param {Number} rowsCnt
* @param {Number} dataPoolId
*
* @returns {Promise}
*/
const populateTableWorker = (self, tableName, strSelectFieldList, offset, rowsInChunk, rowsCnt, dataPoolId) => {
return new Promise(resolvePopulateTableWorker => {
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[populateTableWorker] Cannot connect to MySQL server...\n\t' + error);
resolvePopulateTableWorker();
} else {
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
const sql = buildChunkQuery(originalTableName, strSelectFieldList, offset, rowsInChunk);
connection.query(sql, (err, rows) => {
connection.release();
if (err) {
generateError(self, '\t--[populateTableWorker] ' + err, sql);
resolvePopulateTableWorker();
} else {
rowsInChunk = rows.length;
rows[0][self._schema + '_' + originalTableName + '_data_chunk_id_temp'] = dataPoolId;
csvStringify(rows, (csvError, csvString) => {
rows = null;
if (csvError) {
generateError(self, '\t--[populateTableWorker] ' + csvError);
resolvePopulateTableWorker();
} else {
const buffer = Buffer.from(csvString, self._encoding);
csvString = null;
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[populateTableWorker] Cannot connect to PostgreSQL server...\n' + error, sql);
resolvePopulateTableWorker();
} else {
const sqlCopy = 'COPY "' + self._schema + '"."' + tableName + '" FROM STDIN DELIMITER \'' + self._delimiter + '\' CSV;';
const copyStream = client.query(from(sqlCopy));
const bufferStream = new BufferStream(buffer);
copyStream.on('end', () => {
/*
* COPY FROM STDIN does not return the number of rows inserted.
* But the transactional behavior still applies (no records inserted if at least one failed).
* That is why in case of 'on end' the rowsInChunk value is actually the number of records inserted.
*/
process.send(new MessageToMaster(tableName, rowsInChunk, rowsCnt));
deleteChunk(self, dataPoolId, client, done).then(() => resolvePopulateTableWorker());
});
copyStream.on('error', copyStreamError => {
processDataError(
self,
copyStreamError,
sql,
sqlCopy,
tableName,
dataPoolId,
client,
done,
resolvePopulateTableWorker
);
});
bufferStream.on('error', bufferStreamError => {
processDataError(
self,
bufferStreamError,
sql,
sqlCopy,
tableName,
dataPoolId,
client,
done,
resolvePopulateTableWorker
);
});
bufferStream
.setEncoding(self._encoding)
.pipe(copyStream);
}
});
}
}, self._encoding);
}
});
}
});
});
};

src/DataLoader.ts Normal file

@@ -0,0 +1,166 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as csvStringify from './CsvStringifyModified';
import { log, generateError } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
import MessageToMaster from './MessageToMaster';
import { enforceConsistency } from './ConsistencyEnforcer';
import * as extraConfigProcessor from './ExtraConfigProcessor';
import BufferStream from './BufferStream';
import * as path from 'path';
import { PoolClient } from 'pg';
const { from } = require('pg-copy-streams'); // No declaration file for module "pg-copy-streams".
process.on('message', async (signal: any) => {
const conv: Conversion = new Conversion(signal.config);
log(conv, '\t--[loadData] Loading the data...');
const promises: Promise<void>[] = signal.chunks.map(async (chunk: any) => {
const isNormalFlow: boolean = await enforceConsistency(conv, chunk);
if (isNormalFlow) {
return populateTableWorker(conv, chunk._tableName, chunk._selectFieldList, chunk._offset, chunk._rowsInChunk, chunk._rowsCnt, chunk._id);
}
const dbAccess: DBAccess = new DBAccess(conv);
const client: PoolClient = await dbAccess.getPgClient();
return deleteChunk(conv, chunk._id, client);
});
await Promise.all(promises);
processSend('processed');
});
/**
* Wraps "process.send" method to avoid "cannot invoke an object which is possibly undefined" warning.
*/
function processSend(x: any): void {
if (process.send) {
process.send(x);
}
}
/**
* Deletes given record from the data-pool.
*/
async function deleteChunk(conv: Conversion, dataPoolId: number, client: PoolClient): Promise<void> {
const sql: string = `DELETE FROM "${ conv._schema }"."data_pool_${ conv._schema }${ conv._mySqlDbName }" WHERE id = ${ dataPoolId };`;
const dbAccess: DBAccess = new DBAccess(conv);
try {
await client.query(sql);
} catch (error) {
await generateError(conv, `\t--[DataLoader::deleteChunk] ${ error }`, sql);
} finally {
dbAccess.releaseDbClient(client);
}
}
/**
* Builds a MySQL query to retrieve the chunk of data.
*/
function buildChunkQuery(tableName: string, selectFieldList: string, offset: number, rowsInChunk: number): string {
return `SELECT ${ selectFieldList } FROM \`${ tableName }\` LIMIT ${ offset },${ rowsInChunk };`;
}
/**
* Processes data-loading error.
*/
async function processDataError(
conv: Conversion,
streamError: string,
sql: string,
sqlCopy: string,
tableName: string,
dataPoolId: number,
client: PoolClient
): Promise<void> {
await generateError(conv, `\t--[populateTableWorker] ${ streamError }`, sqlCopy);
const rejectedData: string = `\t--[populateTableWorker] Error loading table data:\n${ sql }\n`;
log(conv, rejectedData, path.join(conv._logsDirPath, `${ tableName }.log`));
return deleteChunk(conv, dataPoolId, client);
}
/**
* Loads a chunk of data using "PostgreSQL COPY".
*/
async function populateTableWorker(
conv: Conversion,
tableName: string,
strSelectFieldList: string,
offset: number,
rowsInChunk: number,
rowsCnt: number,
dataPoolId: number
): Promise<void> {
return new Promise<void>(async resolvePopulateTableWorker => {
const originalTableName: string = extraConfigProcessor.getTableName(conv, tableName, true);
const sql: string = buildChunkQuery(originalTableName, strSelectFieldList, offset, rowsInChunk);
const dbAccess: DBAccess = new DBAccess(conv);
const logTitle: string = 'DataLoader::populateTableWorker';
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.MYSQL, false, false);
if (result.error) {
return resolvePopulateTableWorker();
}
rowsInChunk = result.data.length;
result.data[0][`${ conv._schema }_${ originalTableName }_data_chunk_id_temp`] = dataPoolId;
csvStringify(result.data, async (csvError: any, csvString: string) => {
if (csvError) {
await generateError(conv, `\t--[${ logTitle }] ${ csvError }`);
return resolvePopulateTableWorker();
}
const buffer: Buffer = Buffer.from(csvString, conv._encoding);
const sqlCopy: string = `COPY "${ conv._schema }"."${ tableName }" FROM STDIN DELIMITER '${ conv._delimiter }' CSV;`;
const client: PoolClient = await dbAccess.getPgClient();
const copyStream: any = client.query(from(sqlCopy));
const bufferStream: BufferStream = new BufferStream(buffer);
copyStream.on('end', () => {
/*
* COPY FROM STDIN does not return the number of rows inserted.
* But the transactional behavior still applies (no records inserted if at least one failed).
* That is why in case of 'on end' the rowsInChunk value is actually the number of records inserted.
*/
processSend(new MessageToMaster(tableName, rowsInChunk, rowsCnt));
return deleteChunk(conv, dataPoolId, client).then(() => resolvePopulateTableWorker());
});
copyStream.on('error', (copyStreamError: string) => {
return processDataError(conv, copyStreamError, sql, sqlCopy, tableName, dataPoolId, client)
.then(() => resolvePopulateTableWorker());
});
bufferStream.on('error', (bufferStreamError: string) => {
return processDataError(conv, bufferStreamError, sql, sqlCopy, tableName, dataPoolId, client)
.then(() => resolvePopulateTableWorker());
});
bufferStream.setEncoding(conv._encoding).pipe(copyStream);
}, conv._encoding);
});
}
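The COPY pipeline above, reduced to its essentials (a minimal sketch; the pool, the target table and the CSV payload are assumed to exist):

import { Pool, PoolClient } from 'pg';
const { from } = require('pg-copy-streams');

async function copyCsvBuffer(pool: Pool, schema: string, table: string, payload: Buffer): Promise<void> {
    const client: PoolClient = await pool.connect();
    const copyStream: any = client.query(from(`COPY "${ schema }"."${ table }" FROM STDIN DELIMITER ',' CSV;`));
    copyStream.on('end', () => client.release());   // All rows were accepted.
    copyStream.on('error', () => client.release()); // Nothing was inserted (COPY is transactional).
    copyStream.end(payload);                        // Write the whole buffer and close the stream.
}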


@@ -1,191 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const childProcess = require('child_process');
const path = require('path');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const MessageToDataLoader = require('./Classes/MessageToDataLoader');
const processConstraints = require('./ConstraintsProcessor');
const decodeBinaryData = require('./BinaryDataDecoder');
/**
* Kill a process specified by the pid.
*
* @param {Number} pid
*
* @returns {undefined}
*/
const killProcess = pid => {
try {
process.kill(pid);
} catch (killError) {
generateError(self, '\t--[killProcess] ' + killError);
}
};
/**
* Check if all data chunks were processed.
*
* @param {Conversion} self
*
* @returns {Boolean}
*/
const dataPoolProcessed = self => {
return self._processedChunks === self._dataPool.length;
};
/**
* Get a size (in MB) of the smallest not yet processed data chunk.
* If all data chunks are processed, then return 0.
*
* @param {Conversion} self
*
* @returns {Number}
*/
const getSmallestDataChunkSizeInMb = self => {
for (let i = self._dataPool.length - 1; i >= 0; --i) {
if (self._dataPool[i]._processed === false) {
return self._dataPool[i]._size_in_mb;
}
}
return 0;
};
/**
* Create an array of indexes, that point to data chunks, that will be processed during current COPY operation.
*
* @param {Conversion} self
*
* @returns {Array}
*/
const fillBandwidth = self => {
const dataChunkIndexes = [];
/*
* Loop through the data pool from the beginning to the end.
* Note, the data pool is created with predefined order, the order by data chunk size descending.
* Note, the "bandwidth" variable represents an actual amount of data, that will be loaded during current COPY operation.
*/
for (let i = 0, bandwidth = 0; i < self._dataPool.length; ++i) {
/*
* Check if current chunk has already been marked as "processed".
* If yes, then continue to the next iteration.
*/
if (self._dataPool[i]._processed === false) {
            // Sum the sizes of the data chunks that are yet to be processed.
bandwidth += self._dataPool[i]._size_in_mb;
if (self._dataChunkSize - bandwidth >= getSmallestDataChunkSizeInMb(self)) {
/*
* Currently, the bandwidth is smaller than "data_chunk_size",
* and the difference between "data_chunk_size" and the bandwidth
                 * is larger than or equal to the currently-smallest data chunk.
                 * This means that more data chunks can be processed during the current COPY operation.
*/
dataChunkIndexes.push(i);
self._dataPool[i]._processed = true;
continue;
}
if (self._dataChunkSize >= bandwidth) {
/*
* Currently, the "data_chunk_size" is greater or equal to the bandwidth.
* This means, that no more data chunks can be processed during current COPY operation.
* Current COPY operation will be performed with maximal possible bandwidth capacity.
*/
dataChunkIndexes.push(i);
self._dataPool[i]._processed = true;
break;
}
/*
             * This data chunk will not be processed during the current COPY operation, because adding it
             * to the bandwidth would make the bandwidth larger than "data_chunk_size".
             * The bandwidth's value should be decreased prior to the next iteration.
*/
bandwidth -= self._dataPool[i]._size_in_mb;
}
}
return dataChunkIndexes;
};
/**
* Instructs DataLoader which data chunks should be loaded.
* No need to check the state-log.
* If dataPool's length is zero, then nmig will proceed to the next step.
*
* @param {Conversion} self
* @param {String} strDataLoaderPath
* @param {Object} options
*
* @returns {undefined}
*/
const pipeData = (self, strDataLoaderPath, options) => {
if (dataPoolProcessed(self)) {
return decodeBinaryData(self).then(processConstraints);
}
const loaderProcess = childProcess.fork(strDataLoaderPath, options);
const bandwidth = fillBandwidth(self);
const chunksToLoad = bandwidth.map(index => {
return self._dataPool[index];
});
loaderProcess.on('message', signal => {
if (typeof signal === 'object') {
self._dicTables[signal.tableName].totalRowsInserted += signal.rowsInserted;
            const msg = '\t--[pipeData] Inserted so far: ' + self._dicTables[signal.tableName].totalRowsInserted + ' rows, '
+ 'Total rows to insert into "' + self._schema + '"."' + signal.tableName + '": ' + signal.totalRowsToInsert;
log(self, msg);
} else {
            killProcess(self, loaderProcess.pid);
self._processedChunks += chunksToLoad.length;
return pipeData(self, strDataLoaderPath, options);
}
});
loaderProcess.send(new MessageToDataLoader(self._config, chunksToLoad));
};
/**
* Manage the DataPipe.
*
* @param {Conversion} self
*
* @returns {undefined}
*/
module.exports = self => {
if (dataPoolProcessed(self)) {
return decodeBinaryData(self).then(processConstraints);
}
const strDataLoaderPath = path.join(__dirname, 'DataLoader.js');
const options = self._loaderMaxOldSpaceSize === 'DEFAULT'
? Object.create(null)
: { execArgv: ['--max-old-space-size=' + self._loaderMaxOldSpaceSize] };
return pipeData(self, strDataLoaderPath, options);
};

src/DataPipeManager.ts Normal file

@@ -0,0 +1,156 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { ChildProcess, fork } from 'child_process';
import * as path from 'path';
import { log, generateError } from './FsOps';
import Conversion from './Conversion';
import MessageToDataLoader from './MessageToDataLoader';
import processConstraints from './ConstraintsProcessor';
import decodeBinaryData from './BinaryDataDecoder';
/**
* Kills a process specified by the pid.
*/
async function killProcess(pid: number, conversion: Conversion): Promise<void> {
try {
process.kill(pid);
} catch (killError) {
await generateError(conversion, `\t--[killProcess] ${ killError }`);
}
}
/**
* Checks if all data chunks were processed.
*/
function dataPoolProcessed(conversion: Conversion): boolean {
return conversion._processedChunks === conversion._dataPool.length;
}
/**
 * Gets the size (in MB) of the smallest non-processed data chunk.
 * If all data chunks are processed, returns 0.
*/
function getSmallestDataChunkSizeInMb(conversion: Conversion): number {
for (let i: number = conversion._dataPool.length - 1; i >= 0; --i) {
if (conversion._dataPool[i]._processed === false) {
return conversion._dataPool[i]._size_in_mb;
}
}
return 0;
}
/**
 * Creates an array of indexes pointing to the data chunks that will be processed during the current COPY operation.
*/
function fillBandwidth(conversion: Conversion): number[] {
const dataChunkIndexes: number[] = [];
// Loop through the data pool from the beginning to the end.
    // Note, the data pool is created in a predefined order: by data chunk size, descending.
    // Note, the "bandwidth" variable represents the actual amount of data that will be loaded during the current COPY operation.
for (let i: number = 0, bandwidth = 0; i < conversion._dataPool.length; ++i) {
// Check if current chunk has already been marked as "processed".
// If yes, then continue to the next iteration.
if (conversion._dataPool[i]._processed === false) {
            // Sum the sizes of the data chunks that are yet to be processed.
bandwidth += conversion._dataPool[i]._size_in_mb;
if (conversion._dataChunkSize - bandwidth >= getSmallestDataChunkSizeInMb(conversion)) {
// Currently, the bandwidth is smaller than "data_chunk_size",
// and the difference between "data_chunk_size" and the bandwidth
                // is larger than or equal to the currently-smallest data chunk.
                // This means that more data chunks can be processed during the current COPY operation.
dataChunkIndexes.push(i);
conversion._dataPool[i]._processed = true;
continue;
}
if (conversion._dataChunkSize >= bandwidth) {
// Currently, the "data_chunk_size" is greater or equal to the bandwidth.
// This means, that no more data chunks can be processed during current COPY operation.
// Current COPY operation will be performed with maximal possible bandwidth capacity.
dataChunkIndexes.push(i);
conversion._dataPool[i]._processed = true;
break;
}
            // This data chunk will not be processed during the current COPY operation, because adding it
            // to the bandwidth would make the bandwidth larger than "data_chunk_size".
            // The bandwidth's value should be decreased prior to the next iteration.
bandwidth -= conversion._dataPool[i]._size_in_mb;
}
}
return dataChunkIndexes;
}
/**
* Instructs DataLoader which data chunks should be loaded.
* No need to check the state-log.
* If dataPool's length is zero, then nmig will proceed to the next step.
*/
async function pipeData(conversion: Conversion, dataLoaderPath: string, options: any): Promise<void> {
if (dataPoolProcessed(conversion)) {
conversion = await decodeBinaryData(conversion);
return processConstraints(conversion);
}
const loaderProcess: ChildProcess = fork(dataLoaderPath, options);
const bandwidth: number[] = fillBandwidth(conversion);
const chunksToLoad: any[] = bandwidth.map((index: number) => conversion._dataPool[index]);
loaderProcess.on('message', async (signal: any) => {
if (typeof signal === 'object') {
conversion._dicTables[signal.tableName].totalRowsInserted += signal.rowsInserted;
            const msg: string = `\t--[pipeData] Inserted so far: ${ conversion._dicTables[signal.tableName].totalRowsInserted } rows,
Total rows to insert into "${ conversion._schema }"."${ signal.tableName }": ${ signal.totalRowsToInsert }`;
log(conversion, msg);
return;
}
await killProcess(loaderProcess.pid, conversion);
conversion._processedChunks += chunksToLoad.length;
return pipeData(conversion, dataLoaderPath, options);
});
loaderProcess.send(new MessageToDataLoader(conversion._config, chunksToLoad));
}
/**
* Manages the DataPipe.
*/
export default async function(conversion: Conversion): Promise<void> {
if (dataPoolProcessed(conversion)) {
conversion = await decodeBinaryData(conversion);
return processConstraints(conversion);
}
    // At runtime this points to ../dist/src/DataLoader.js, not to DataLoader.ts.
const dataLoaderPath: string = path.join(__dirname, 'DataLoader.js');
const options: any = conversion._loaderMaxOldSpaceSize === 'DEFAULT'
? Object.create(null)
: { execArgv: [`--max-old-space-size=${ conversion._loaderMaxOldSpaceSize }`] };
return pipeData(conversion, dataLoaderPath, options);
}
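
A dry run makes the fillBandwidth packing concrete. The sketch below restates the selection loop on plain numbers instead of a Conversion instance; the chunk sizes and the 10 MB cap are made-up values:

// Standalone restatement of the fillBandwidth selection loop.
function pack(sizes: number[], processed: boolean[], cap: number): number[] {
    const picked: number[] = [];
    const smallest = (): number => {
        for (let i = sizes.length - 1; i >= 0; --i) {
            if (!processed[i]) return sizes[i];
        }
        return 0;
    };
    for (let i = 0, bandwidth = 0; i < sizes.length; ++i) {
        if (processed[i]) continue;
        bandwidth += sizes[i];
        if (cap - bandwidth >= smallest()) {
            picked.push(i);
            processed[i] = true;          // fits with room to spare
            continue;
        }
        if (cap >= bandwidth) {
            picked.push(i);
            processed[i] = true;          // fills the cap exactly
            break;
        }
        bandwidth -= sizes[i];            // would overflow - put it back
    }
    return picked;
}

const sizes = [8, 6, 4, 2, 1];            // MB, descending, like the data pool
const processed = sizes.map(() => false);
console.log(pack(sizes, processed, 10));  // [ 0, 3 ] -> 8 + 2 = 10 MB in the 1st COPY round
console.log(pack(sizes, processed, 10));  // [ 1, 2 ] -> 6 + 4 = 10 MB in the 2nd round
console.log(pack(sizes, processed, 10));  // [ 4 ]    -> the 1 MB leftover in the 3rd round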


@@ -1,138 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
/**
* Create the "{schema}"."data_pool_{self._schema + self._mySqlDbName} temporary table."
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports.createDataPoolTable = self => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[DataPoolManager.createDataPoolTable] Cannot connect to PostgreSQL server...\n' + error);
process.exit();
} else {
const sql = 'CREATE TABLE IF NOT EXISTS "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName
+ '"("id" BIGSERIAL, "json" TEXT, "is_started" BOOLEAN, "size_in_mb" DOUBLE PRECISION);';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[DataPoolManager.createDataPoolTable] ' + err, sql);
process.exit();
} else {
log(self, '\t--[DataPoolManager.createDataPoolTable] table "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '" is created...');
resolve(self);
}
});
}
});
});
});
};
/**
* Drop the "{schema}"."data_pool_{self._schema + self._mySqlDbName} temporary table."
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports.dropDataPoolTable = self => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[DataPoolManager.dropDataPoolTable] Cannot connect to PostgreSQL server...\n' + error);
resolve();
} else {
const sql = 'DROP TABLE "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '";';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[DataPoolManager.dropDataPoolTable] ' + err, sql);
} else {
log(self, '\t--[DataPoolManager.dropDataPoolTable] table "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '" is dropped...');
}
resolve();
});
}
});
});
});
};
/**
 * Reads the temporary table and generates the data-pool.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports.readDataPool = self => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[DataPoolManager.readDataPool] Cannot connect to PostgreSQL server...\n' + error);
process.exit();
} else {
const sql = 'SELECT id AS id, json AS json, size_in_mb AS size_in_mb FROM "'
+ self._schema + '"."data_pool_' + self._schema + self._mySqlDbName
+ '" ORDER BY size_in_mb DESC;';
client.query(sql, (err, arrDataPool) => {
done();
if (err) {
generateError(self, '\t--[DataPoolManager.readDataPool] ' + err, sql);
process.exit();
}
for (let i = 0; i < arrDataPool.rows.length; ++i) {
const obj = JSON.parse(arrDataPool.rows[i].json);
obj._id = arrDataPool.rows[i].id;
obj._size_in_mb = +arrDataPool.rows[i].size_in_mb;
obj._processed = false;
self._dataPool.push(obj);
}
log(self, '\t--[DataPoolManager.readDataPool] Data-Pool is loaded...');
resolve(self);
});
}
});
});
});
};

src/DataPoolManager.ts Normal file

@@ -0,0 +1,71 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
import { log } from './FsOps';
import Conversion from './Conversion';
/**
* Creates the "{schema}"."data_pool_{self._schema + self._mySqlDbName}" temporary table.
*/
export async function createDataPoolTable(conversion: Conversion): Promise<Conversion> {
const dbAccess: DBAccess = new DBAccess(conversion);
const table: string = `"${ conversion._schema }"."data_pool_${ conversion._schema }${ conversion._mySqlDbName }"`;
const sql: string = `CREATE TABLE IF NOT EXISTS ${ table }
("id" BIGSERIAL, "json" TEXT, "is_started" BOOLEAN, "size_in_mb" DOUBLE PRECISION);`;
await dbAccess.query('DataPoolManager::createDataPoolTable', sql, DBVendors.PG, true, false);
log(conversion, `\t--[DataPoolManager.createDataPoolTable] table ${ table } is created...`);
return conversion;
}
/**
* Drops the "{schema}"."data_pool_{self._schema + self._mySqlDbName}" temporary table.
*/
export async function dropDataPoolTable(conversion: Conversion): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const table: string = `"${ conversion._schema }"."data_pool_${ conversion._schema }${ conversion._mySqlDbName }"`;
const sql: string = `DROP TABLE ${ table };`;
await dbAccess.query('DataPoolManager::dropDataPoolTable', sql, DBVendors.PG, false, false);
log(conversion, `\t--[DataPoolManager.dropDataPoolTable] table ${ table } is dropped...`);
}
/**
 * Reads the temporary table and generates the data-pool.
*/
export async function readDataPool(conversion: Conversion): Promise<Conversion> {
const dbAccess: DBAccess = new DBAccess(conversion);
const table: string = `"${ conversion._schema }"."data_pool_${ conversion._schema }${ conversion._mySqlDbName }"`;
const sql: string = `SELECT id AS id, json AS json, size_in_mb AS size_in_mb FROM ${ table } ORDER BY size_in_mb DESC;`;
    const result: DBAccessQueryResult = await dbAccess.query('DataPoolManager::readDataPool', sql, DBVendors.PG, true, false);
result.data.rows.forEach((row: any) => {
const obj: any = JSON.parse(row.json);
obj._id = row.id;
obj._size_in_mb = +row.size_in_mb;
obj._processed = false;
conversion._dataPool.push(obj);
});
log(conversion, '\t--[DataPoolManager.readDataPool] Data-Pool is loaded...');
return conversion;
}
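
For illustration, a data-pool row and the chunk object readDataPool builds from it might look as follows. The shape of the json payload is an assumption (it is produced elsewhere, by the data-chunks processor); only the _id, _size_in_mb and _processed properties are guaranteed by the code above:

// A sample row as returned by the SELECT above (illustrative values):
const row: any = {
    id: 42,
    size_in_mb: 7.25,
    json: '{"_tableName":"orders","_selectFieldList":"*","_offset":0,"_rowsInChunk":50000,"_rowsCnt":200000}'
};

// What readDataPool pushes into conversion._dataPool:
const chunk: any = JSON.parse(row.json);
chunk._id = row.id;                  // identifies the chunk when it is deleted from the pool
chunk._size_in_mb = +row.size_in_mb; // unary plus guarantees a number, whatever the driver returned
chunk._processed = false;            // consumed by DataPipeManager's fillBandwidth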


@@ -1,116 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
const { mapDataTypes } = require('./TableProcessor');
/**
 * Define which columns of the given table have default values.
 * Set default values, if needed.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports = (self, tableName) => {
return connect(self).then(() => {
return new Promise(resolve => {
log(self, '\t--[processDefault] Defines default values for table: "' + self._schema + '"."' + tableName + '"', self._dicTables[tableName].tableLogPath);
const processDefaultPromises = [];
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
const pgSqlNumericTypes = ['money', 'numeric', 'decimal', 'double precision', 'real', 'bigint', 'int', 'smallint'];
const sqlReservedValues = {
'CURRENT_DATE' : 'CURRENT_DATE',
'0000-00-00' : "'-INFINITY'",
'CURRENT_TIME' : 'CURRENT_TIME',
                '00:00:00'            : "'00:00:00'", // quoted: a bare 00:00:00 is not a valid default expression
'CURRENT_TIMESTAMP' : 'CURRENT_TIMESTAMP',
'0000-00-00 00:00:00' : "'-INFINITY'",
'LOCALTIME' : 'LOCALTIME',
'LOCALTIMESTAMP' : 'LOCALTIMESTAMP',
'NULL' : 'NULL',
'UTC_DATE' : "(CURRENT_DATE AT TIME ZONE 'UTC')",
'UTC_TIME' : "(CURRENT_TIME AT TIME ZONE 'UTC')",
'UTC_TIMESTAMP' : "(NOW() AT TIME ZONE 'UTC')"
};
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
if (self._dicTables[tableName].arrTableColumns[i].Default) {
processDefaultPromises.push(
new Promise(resolveProcessDefault => {
self._pg.connect((error, client, done) => {
if (error) {
const msg = '\t--[processDefault] Cannot connect to PostgreSQL server...\n' + error;
generateError(self, msg);
resolveProcessDefault();
} else {
const pgSqlDataType = mapDataTypes(self._dataTypesMap, self._dicTables[tableName].arrTableColumns[i].Type);
const columnName = extraConfigProcessor.getColumnName(
self,
originalTableName,
self._dicTables[tableName].arrTableColumns[i].Field,
false
);
let sql = 'ALTER TABLE "' + self._schema + '"."' + tableName
+ '" ' + 'ALTER COLUMN "' + columnName + '" SET DEFAULT ';
if (sqlReservedValues[self._dicTables[tableName].arrTableColumns[i].Default]) {
sql += sqlReservedValues[self._dicTables[tableName].arrTableColumns[i].Default] + ';';
} else if (pgSqlNumericTypes.indexOf(pgSqlDataType) === -1) {
sql += "'" + self._dicTables[tableName].arrTableColumns[i].Default + "';";
} else {
sql += self._dicTables[tableName].arrTableColumns[i].Default + ';';
}
client.query(sql, err => {
done();
if (err) {
const msg2 = '\t--[processDefault] Error occurred when tried to set default value for "'
+ self._schema + '"."' + tableName + '"."' + columnName + '"...\n' + err;
generateError(self, msg2, sql);
resolveProcessDefault();
} else {
const success = '\t--[processDefault] Set default value for "' + self._schema + '"."'
+ tableName + '"."' + columnName + '"...';
log(self, success, self._dicTables[tableName].tableLogPath);
resolveProcessDefault();
}
});
}
});
})
);
}
}
Promise.all(processDefaultPromises).then(() => resolve());
});
});
};

src/DefaultProcessor.ts Normal file

@@ -0,0 +1,78 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import * as extraConfigProcessor from './ExtraConfigProcessor';
import { mapDataTypes } from './TableProcessor';
import DBAccessQueryResult from './DBAccessQueryResult';
/**
 * Defines which columns of the given table have default values.
 * Sets default values, if needed.
*/
export default async function(conversion: Conversion, tableName: string): Promise<void> {
const logTitle: string = 'DefaultValuesProcessor';
const dbAccess: DBAccess = new DBAccess(conversion);
const msg: string = `\t--[${ logTitle }] Defines default values for table: "${ conversion._schema }"."${ tableName }"`;
log(conversion, msg, conversion._dicTables[tableName].tableLogPath);
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const pgSqlNumericTypes: string[] = ['money', 'numeric', 'decimal', 'double precision', 'real', 'bigint', 'int', 'smallint'];
const sqlReservedValues: any = {
'CURRENT_DATE': 'CURRENT_DATE',
'0000-00-00': "'-INFINITY'",
'CURRENT_TIME': 'CURRENT_TIME',
        '00:00:00': "'00:00:00'", // quoted: a bare 00:00:00 is not a valid default expression
'CURRENT_TIMESTAMP': 'CURRENT_TIMESTAMP',
'0000-00-00 00:00:00': "'-INFINITY'",
'LOCALTIME': 'LOCALTIME',
'LOCALTIMESTAMP': 'LOCALTIMESTAMP',
'NULL': 'NULL',
'null': 'NULL',
'UTC_DATE': "(CURRENT_DATE AT TIME ZONE 'UTC')",
'UTC_TIME': "(CURRENT_TIME AT TIME ZONE 'UTC')",
'UTC_TIMESTAMP': "(NOW() AT TIME ZONE 'UTC')"
};
const promises: Promise<void>[] = conversion._dicTables[tableName].arrTableColumns.map(async (column: any) => {
const pgSqlDataType: string = mapDataTypes(conversion._dataTypesMap, column.Type);
const columnName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, column.Field, false);
let sql: string = `ALTER TABLE "${ conversion._schema }"."${ tableName }" ALTER COLUMN "${ columnName }" SET DEFAULT `;
if (sqlReservedValues[column.Default]) {
sql += `${ sqlReservedValues[column.Default] };`;
} else if (pgSqlNumericTypes.indexOf(pgSqlDataType) === -1) {
sql += `'${ column.Default }';`;
} else {
sql += `${ column.Default };`;
}
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
if (!result.error) {
const successMsg: string = `\t--[${ logTitle }] Set default value for "${ conversion._schema }"."${ tableName }"."${ columnName }"...`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
}
});
await Promise.all(promises);
}
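
Three representative branches of the logic above, restated as a standalone helper (buildDefaultSql is a hypothetical name; schema, table and column values are illustrative, and the reserved-values map is trimmed to two entries):

// Minimal restatement of the branching above.
function buildDefaultSql(schema: string, table: string, column: string, pgType: string, mysqlDefault: string): string {
    const numericTypes: string[] = ['money', 'numeric', 'decimal', 'double precision', 'real', 'bigint', 'int', 'smallint'];
    const reserved: any = { 'CURRENT_TIMESTAMP': 'CURRENT_TIMESTAMP', '0000-00-00 00:00:00': "'-INFINITY'" };
    let sql: string = `ALTER TABLE "${ schema }"."${ table }" ALTER COLUMN "${ column }" SET DEFAULT `;
    if (reserved[mysqlDefault]) {
        sql += `${ reserved[mysqlDefault] };`;  // reserved word or mapped literal goes in as-is
    } else if (numericTypes.indexOf(pgType) === -1) {
        sql += `'${ mysqlDefault }';`;          // non-numeric type -> the default must be quoted
    } else {
        sql += `${ mysqlDefault };`;            // numeric type -> the default stays unquoted
    }
    return sql;
}

buildDefaultSql('public', 't', 'created_at', 'timestamp', 'CURRENT_TIMESTAMP');
// -> ALTER TABLE "public"."t" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP;
buildDefaultSql('public', 't', 'status', 'character varying', 'n/a');
// -> ALTER TABLE "public"."t" ALTER COLUMN "status" SET DEFAULT 'n/a';
buildDefaultSql('public', 't', 'qty', 'bigint', '0');
// -> ALTER TABLE "public"."t" ALTER COLUMN "qty" SET DEFAULT 0;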


@@ -1,95 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Define which columns of the given table are of type "enum".
 * Set an appropriate constraint, if needed.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports = (self, tableName) => {
return connect(self).then(() => {
return new Promise(resolve => {
log(self, '\t--[processEnum] Defines "ENUMs" for table "' + self._schema + '"."' + tableName + '"', self._dicTables[tableName].tableLogPath);
const processEnumPromises = [];
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
if (self._dicTables[tableName].arrTableColumns[i].Type.indexOf('(') !== -1) {
const arrType = self._dicTables[tableName].arrTableColumns[i].Type.split('(');
if (arrType[0] === 'enum') {
processEnumPromises.push(
new Promise(resolveProcessEnum => {
self._pg.connect((error, client, done) => {
if (error) {
const msg = '\t--[processEnum] Cannot connect to PostgreSQL server...\n' + error;
generateError(self, msg);
resolveProcessEnum();
} else {
const columnName = extraConfigProcessor.getColumnName(
self,
originalTableName,
self._dicTables[tableName].arrTableColumns[i].Field,
false
);
const sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" '
+ 'ADD CHECK ("' + columnName + '" IN (' + arrType[1] + ');';
client.query(sql, err => {
done();
if (err) {
const msg2 = '\t--[processEnum] Error while setting ENUM for "' + self._schema + '"."'
+ tableName + '"."' + columnName + '"...\n' + err;
generateError(self, msg2, sql);
resolveProcessEnum();
} else {
const success = '\t--[processEnum] Set "ENUM" for "' + self._schema + '"."' + tableName
+ '"."' + columnName + '"...';
log(self, success, self._dicTables[tableName].tableLogPath);
resolveProcessEnum();
}
});
}
});
})
);
}
}
}
Promise.all(processEnumPromises).then(() => resolve());
});
});
};

src/EnumProcessor.ts Normal file

@@ -0,0 +1,62 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import * as extraConfigProcessor from './ExtraConfigProcessor';
import DBAccessQueryResult from './DBAccessQueryResult';
/**
* Defines which columns of the given table are of type "enum".
 * Sets an appropriate constraint, if needed.
*/
export default async function(conversion: Conversion, tableName: string): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const msg: string = `\t--[EnumProcessor] Defines "ENUMs" for table "${ conversion._schema }"."${ tableName }"`;
log(conversion, msg, conversion._dicTables[tableName].tableLogPath);
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const processEnumPromises: Promise<void>[] = conversion._dicTables[tableName].arrTableColumns.map(async (column: any) => {
if (column.Type.indexOf('(') !== -1) {
const arrType: string[] = column.Type.split('(');
if (arrType[0] === 'enum') {
const columnName: string = extraConfigProcessor.getColumnName(
conversion,
originalTableName,
column.Field,
false
);
const sql: string = `ALTER TABLE "${ conversion._schema }"."${ tableName }" ADD CHECK ("${ columnName }" IN (${ arrType[1] });`;
const result: DBAccessQueryResult = await dbAccess.query('EnumProcessor', sql, DBVendors.PG, false, false);
if (!result.error) {
const successMsg: string = `\t--[EnumProcessor] Set "ENUM" for "${ conversion._schema }"."${ tableName }"."${ columnName }"...`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
}
}
}
});
await Promise.all(processEnumPromises);
}
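
The seemingly unbalanced parenthesis in the ADD CHECK template above is intentional: splitting the MySQL type on "(" leaves the closing ")" inside arrType[1]. A short illustration (table and column names are made up):

const columnType: string = "enum('new','paid','void')";  // as reported by MySQL
const arrType: string[] = columnType.split('(');
// arrType[0] === "enum"
// arrType[1] === "'new','paid','void')"  <- keeps the closing parenthesis
const sql: string = `ALTER TABLE "public"."orders" ADD CHECK ("status" IN (${ arrType[1] });`;
// -> ALTER TABLE "public"."orders" ADD CHECK ("status" IN ('new','paid','void'));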


@@ -1,49 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const fs = require('fs');
const log = require('./Logger');
/**
 * Writes a detailed error message to the "/errors-only.log" file.
*
* @param {Conversion} self
* @param {String} message
* @param {String} sql
*
* @returns {undefined}
*/
module.exports = (self, message, sql = '') => {
message += '\n\n\tSQL: ' + sql + '\n\n';
const buffer = Buffer.from(message, self._encoding);
log(self, message, undefined, true);
fs.open(self._errorLogsPath, 'a', self._0777, (error, fd) => {
if (!error) {
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
// Each async function MUST have a callback (according to Node.js >= 7).
});
});
}
});
};


@@ -1,100 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
/**
* Get current table's name.
*
* @param {Conversion} self
* @param {String} currentTableName
* @param {Boolean} shouldGetOriginal
*
* @returns {String}
*/
module.exports.getTableName = (self, currentTableName, shouldGetOriginal) => {
if (self._extraConfig !== null && 'tables' in self._extraConfig) {
for (let i = 0; i < self._extraConfig.tables.length; ++i) {
if ((shouldGetOriginal ? self._extraConfig.tables[i].name.new : self._extraConfig.tables[i].name.original) === currentTableName) {
return shouldGetOriginal ? self._extraConfig.tables[i].name.original : self._extraConfig.tables[i].name.new;
}
}
}
return currentTableName;
};
/**
* Get current column's name.
*
* @param {Conversion} self
* @param {String} originalTableName
* @param {String} currentColumnName
* @param {Boolean} shouldGetOriginal
*
* @returns {String}
*/
module.exports.getColumnName = (self, originalTableName, currentColumnName, shouldGetOriginal) => {
if (self._extraConfig !== null && 'tables' in self._extraConfig) {
for (let i = 0; i < self._extraConfig.tables.length; ++i) {
if (self._extraConfig.tables[i].name.original === originalTableName && 'columns' in self._extraConfig.tables[i]) {
for (let columnsCount = 0; columnsCount < self._extraConfig.tables[i].columns.length; ++columnsCount) {
if (self._extraConfig.tables[i].columns[columnsCount].original === currentColumnName) {
return shouldGetOriginal
? self._extraConfig.tables[i].columns[columnsCount].original
: self._extraConfig.tables[i].columns[columnsCount].new;
}
}
}
}
}
return currentColumnName;
};
/**
* Parse the extra_config foreign_keys attributes and generate
* an output array required by ForeignKeyProcessor::processForeignKeyWorker.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Array}
*/
module.exports.parseForeignKeys = (self, tableName) => {
const retVal = [];
if (self._extraConfig !== null && 'foreign_keys' in self._extraConfig) {
for (let i = 0; i < self._extraConfig.foreign_keys.length; ++i) {
if (self._extraConfig.foreign_keys[i].table_name === tableName) {
// There may be several FKs in a single table.
const objFk = Object.create(null);
for (const attribute in self._extraConfig.foreign_keys[i]) {
objFk[attribute.toUpperCase()] = self._extraConfig.foreign_keys[i][attribute];
}
retVal.push(objFk);
}
}
}
return retVal;
};

src/ExtraConfigProcessor.ts Normal file

@@ -0,0 +1,82 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import Conversion from './Conversion';
/**
* Retrieves current table's name.
*/
export function getTableName(conversion: Conversion, currentTableName: string, shouldGetOriginal: boolean): string {
if (conversion._extraConfig !== null && 'tables' in conversion._extraConfig) {
for (let i: number = 0; i < conversion._extraConfig.tables.length; ++i) {
if ((shouldGetOriginal ? conversion._extraConfig.tables[i].name.new : conversion._extraConfig.tables[i].name.original) === currentTableName) {
return shouldGetOriginal ? conversion._extraConfig.tables[i].name.original : conversion._extraConfig.tables[i].name.new;
}
}
}
return currentTableName;
}
/**
* Retrieves current column's name.
*/
export function getColumnName(conversion: Conversion, originalTableName: string, currentColumnName: string, shouldGetOriginal: boolean): string {
if (conversion._extraConfig !== null && 'tables' in conversion._extraConfig) {
for (let i: number = 0; i < conversion._extraConfig.tables.length; ++i) {
if (conversion._extraConfig.tables[i].name.original === originalTableName && 'columns' in conversion._extraConfig.tables[i]) {
for (let columnsCount: number = 0; columnsCount < conversion._extraConfig.tables[i].columns.length; ++columnsCount) {
if (conversion._extraConfig.tables[i].columns[columnsCount].original === currentColumnName) {
return shouldGetOriginal
? conversion._extraConfig.tables[i].columns[columnsCount].original
: conversion._extraConfig.tables[i].columns[columnsCount].new;
}
}
}
}
}
return currentColumnName;
}
/**
 * Parses the extra_config foreign_keys attributes and generates
* an output array required by ForeignKeyProcessor::processForeignKeyWorker.
*/
export function parseForeignKeys(conversion: Conversion, tableName: string): any[] {
const retVal: any[] = [];
if (conversion._extraConfig !== null && 'foreign_keys' in conversion._extraConfig) {
for (let i: number = 0; i < conversion._extraConfig.foreign_keys.length; ++i) {
if (conversion._extraConfig.foreign_keys[i].table_name === tableName) {
// There may be several FKs in a single table.
const objFk: any = Object.create(null);
for (const attribute in conversion._extraConfig.foreign_keys[i]) {
objFk[attribute.toUpperCase()] = conversion._extraConfig.foreign_keys[i][attribute];
}
retVal.push(objFk);
}
}
}
return retVal;
}
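
A sketch of the extra_config.json shape these helpers expect. All values are illustrative; the keys shown are only those actually read above (tables[].name.original/new, tables[].columns[].original/new, foreign_keys[].table_name), plus the foreign-key attributes that parseForeignKeys upper-cases for ForeignKeyProcessor::processForeignKeyWorker:

const extraConfigSample: any = {
    tables: [
        {
            name: { original: 'users_tbl', new: 'users' },  // table rename
            columns: [
                { original: 'usr_name', new: 'name' }       // column rename
            ]
        }
    ],
    foreign_keys: [
        {
            table_name: 'orders',               // matched against tableName by parseForeignKeys
            constraint_name: 'fk_orders_users', // upper-cased to CONSTRAINT_NAME, and so on
            column_name: 'user_id',
            referenced_table_name: 'users',
            referenced_column_name: 'id',
            update_rule: 'CASCADE',
            delete_rule: 'RESTRICT'
        }
    ]
};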


@@ -1,174 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const migrationStateManager = require('./MigrationStateManager');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Creates foreign keys for given table.
*
* @param {Conversion} self
* @param {String} tableName
* @param {Array} rows
*
* @returns {Promise}
*/
const processForeignKeyWorker = (self, tableName, rows) => {
return new Promise(resolve => {
const constraintsPromises = [];
const objConstraints = Object.create(null);
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
for (let i = 0; i < rows.length; ++i) {
const currentColumnName = extraConfigProcessor.getColumnName(self, originalTableName, rows[i].COLUMN_NAME, false);
const currentReferencedTableName = extraConfigProcessor.getTableName(self, rows[i].REFERENCED_TABLE_NAME, false);
const originalReferencedTableName = extraConfigProcessor.getTableName(self, rows[i].REFERENCED_TABLE_NAME, true);
const currentReferencedColumnName = extraConfigProcessor.getColumnName(
self,
originalReferencedTableName,
rows[i].REFERENCED_COLUMN_NAME,
false
);
if (rows[i].CONSTRAINT_NAME in objConstraints) {
objConstraints[rows[i].CONSTRAINT_NAME].column_name.push('"' + currentColumnName + '"');
objConstraints[rows[i].CONSTRAINT_NAME].referenced_column_name.push('"' + currentReferencedColumnName + '"');
} else {
objConstraints[rows[i].CONSTRAINT_NAME] = Object.create(null);
objConstraints[rows[i].CONSTRAINT_NAME].column_name = ['"' + currentColumnName + '"'];
objConstraints[rows[i].CONSTRAINT_NAME].referenced_column_name = ['"' + currentReferencedColumnName + '"'];
objConstraints[rows[i].CONSTRAINT_NAME].referenced_table_name = currentReferencedTableName;
objConstraints[rows[i].CONSTRAINT_NAME].update_rule = rows[i].UPDATE_RULE;
objConstraints[rows[i].CONSTRAINT_NAME].delete_rule = rows[i].DELETE_RULE;
}
}
rows = null;
for (const attr in objConstraints) {
constraintsPromises.push(
new Promise(resolveConstraintPromise => {
self._pg.connect((error, client, done) => {
if (error) {
objConstraints[attr] = null;
generateError(self, '\t--[processForeignKeyWorker] Cannot connect to PostgreSQL server...');
resolveConstraintPromise();
} else {
const sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" ADD FOREIGN KEY ('
+ objConstraints[attr].column_name.join(',') + ') REFERENCES "' + self._schema + '"."'
+ objConstraints[attr].referenced_table_name + '" (' + objConstraints[attr].referenced_column_name.join(',')
+ ') ON UPDATE ' + objConstraints[attr].update_rule + ' ON DELETE ' + objConstraints[attr].delete_rule + ';';
objConstraints[attr] = null;
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[processForeignKeyWorker] ' + err, sql);
resolveConstraintPromise();
} else {
resolveConstraintPromise();
}
});
}
});
})
);
}
Promise.all(constraintsPromises).then(() => resolve());
});
};
/**
* Starts a process of foreign keys creation.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports = self => {
return migrationStateManager.get(self, 'foreign_keys_loaded').then(isForeignKeysProcessed => {
return new Promise(resolve => {
const fkPromises = [];
if (!isForeignKeysProcessed) {
for (let i = 0; i < self._tablesToMigrate.length; ++i) {
const tableName = self._tablesToMigrate[i];
log(self, '\t--[processForeignKey] Search foreign keys for table "' + self._schema + '"."' + tableName + '"...');
fkPromises.push(
new Promise(fkResolve => {
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[processForeignKey] Cannot connect to MySQL server...\n' + error);
fkResolve();
} else {
const sql = "SELECT cols.COLUMN_NAME, refs.REFERENCED_TABLE_NAME, refs.REFERENCED_COLUMN_NAME, "
+ "cRefs.UPDATE_RULE, cRefs.DELETE_RULE, cRefs.CONSTRAINT_NAME "
+ "FROM INFORMATION_SCHEMA.`COLUMNS` AS cols "
+ "INNER JOIN INFORMATION_SCHEMA.`KEY_COLUMN_USAGE` AS refs "
+ "ON refs.TABLE_SCHEMA = cols.TABLE_SCHEMA "
+ "AND refs.REFERENCED_TABLE_SCHEMA = cols.TABLE_SCHEMA "
+ "AND refs.TABLE_NAME = cols.TABLE_NAME "
+ "AND refs.COLUMN_NAME = cols.COLUMN_NAME "
+ "LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS cRefs "
+ "ON cRefs.CONSTRAINT_SCHEMA = cols.TABLE_SCHEMA "
+ "AND cRefs.CONSTRAINT_NAME = refs.CONSTRAINT_NAME "
+ "LEFT JOIN INFORMATION_SCHEMA.`KEY_COLUMN_USAGE` AS links "
+ "ON links.TABLE_SCHEMA = cols.TABLE_SCHEMA "
+ "AND links.REFERENCED_TABLE_SCHEMA = cols.TABLE_SCHEMA "
+ "AND links.REFERENCED_TABLE_NAME = cols.TABLE_NAME "
+ "AND links.REFERENCED_COLUMN_NAME = cols.COLUMN_NAME "
+ "LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS cLinks "
+ "ON cLinks.CONSTRAINT_SCHEMA = cols.TABLE_SCHEMA "
+ "AND cLinks.CONSTRAINT_NAME = links.CONSTRAINT_NAME "
+ "WHERE cols.TABLE_SCHEMA = '" + self._mySqlDbName + "' "
+ "AND cols.TABLE_NAME = '" + extraConfigProcessor.getTableName(self, tableName, true) + "';";
connection.query(sql, (err, rows) => {
connection.release();
if (err) {
generateError(self, '\t--[processForeignKey] ' + err, sql);
}
const extraRows = extraConfigProcessor.parseForeignKeys(self, tableName);
const fullRows = (rows || []).concat(extraRows); // Prevent failure if "rows" is undefined.
processForeignKeyWorker(self, tableName, fullRows).then(() => {
log(self, '\t--[processForeignKey] Foreign keys for table "' + self._schema + '"."' + tableName + '" are set...');
fkResolve();
});
});
}
});
})
);
}
}
Promise.all(fkPromises).then(() => resolve());
});
});
};

src/ForeignKeyProcessor.ts Normal file

@@ -0,0 +1,125 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as migrationStateManager from './MigrationStateManager';
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Creates foreign keys for given table.
*/
async function processForeignKeyWorker(conversion: Conversion, dbAccess: DBAccess, tableName: string, rows: any[]): Promise<void> {
const objConstraints: any = Object.create(null);
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const logTitle: string = 'processForeignKeyWorker';
rows.forEach((row: any) => {
const currentColumnName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, row.COLUMN_NAME, false);
const currentReferencedTableName: string = extraConfigProcessor.getTableName(conversion, row.REFERENCED_TABLE_NAME, false);
const originalReferencedTableName: string = extraConfigProcessor.getTableName(conversion, row.REFERENCED_TABLE_NAME, true);
const currentReferencedColumnName: string = extraConfigProcessor.getColumnName(
conversion,
originalReferencedTableName,
row.REFERENCED_COLUMN_NAME,
false
);
if (row.CONSTRAINT_NAME in objConstraints) {
objConstraints[row.CONSTRAINT_NAME].column_name.push(`"${ currentColumnName }"`);
objConstraints[row.CONSTRAINT_NAME].referenced_column_name.push(`"${ currentReferencedColumnName }"`);
return;
}
objConstraints[row.CONSTRAINT_NAME] = Object.create(null);
objConstraints[row.CONSTRAINT_NAME].column_name = [`"${ currentColumnName }"`];
objConstraints[row.CONSTRAINT_NAME].referenced_column_name = [`"${ currentReferencedColumnName }"`];
objConstraints[row.CONSTRAINT_NAME].referenced_table_name = currentReferencedTableName;
objConstraints[row.CONSTRAINT_NAME].update_rule = row.UPDATE_RULE;
objConstraints[row.CONSTRAINT_NAME].delete_rule = row.DELETE_RULE;
});
const constraintsPromises: Promise<void>[] = Object.keys(objConstraints).map(async (attr: string) => {
const sql: string = `ALTER TABLE "${ conversion._schema }"."${ tableName }"
ADD FOREIGN KEY (${ objConstraints[attr].column_name.join(',') })
REFERENCES "${ conversion._schema }"."${ objConstraints[attr].referenced_table_name }"
(${ objConstraints[attr].referenced_column_name.join(',') })
ON UPDATE ${ objConstraints[attr].update_rule }
ON DELETE ${ objConstraints[attr].delete_rule };`;
await dbAccess.query(logTitle, sql, DBVendors.PG, false, false);
});
await Promise.all(constraintsPromises);
}
/**
* Starts a process of foreign keys creation.
*/
export default async function(conversion: Conversion): Promise<void> {
const logTitle: string = 'processForeignKey';
const isForeignKeysProcessed: boolean = await migrationStateManager.get(conversion, 'foreign_keys_loaded');
if (isForeignKeysProcessed) {
return;
}
const fkPromises: Promise<void>[] = conversion._tablesToMigrate.map(async (tableName: string) => {
log(conversion, `\t--[${ logTitle }] Search foreign keys for table "${ conversion._schema }"."${ tableName }"...`);
const sql: string = `SELECT cols.COLUMN_NAME, refs.REFERENCED_TABLE_NAME, refs.REFERENCED_COLUMN_NAME,
cRefs.UPDATE_RULE, cRefs.DELETE_RULE, cRefs.CONSTRAINT_NAME
FROM INFORMATION_SCHEMA.\`COLUMNS\` AS cols
INNER JOIN INFORMATION_SCHEMA.\`KEY_COLUMN_USAGE\` AS refs
ON refs.TABLE_SCHEMA = cols.TABLE_SCHEMA
AND refs.REFERENCED_TABLE_SCHEMA = cols.TABLE_SCHEMA
AND refs.TABLE_NAME = cols.TABLE_NAME
AND refs.COLUMN_NAME = cols.COLUMN_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS cRefs
ON cRefs.CONSTRAINT_SCHEMA = cols.TABLE_SCHEMA
AND cRefs.CONSTRAINT_NAME = refs.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.\`KEY_COLUMN_USAGE\` AS links
ON links.TABLE_SCHEMA = cols.TABLE_SCHEMA
AND links.REFERENCED_TABLE_SCHEMA = cols.TABLE_SCHEMA
AND links.REFERENCED_TABLE_NAME = cols.TABLE_NAME
AND links.REFERENCED_COLUMN_NAME = cols.COLUMN_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS cLinks
ON cLinks.CONSTRAINT_SCHEMA = cols.TABLE_SCHEMA
AND cLinks.CONSTRAINT_NAME = links.CONSTRAINT_NAME
WHERE cols.TABLE_SCHEMA = '${ conversion._mySqlDbName }'
AND cols.TABLE_NAME = '${ extraConfigProcessor.getTableName(conversion, tableName, true) }';`;
const dbAccess: DBAccess = new DBAccess(conversion);
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.MYSQL, false, false);
if (result.error) {
return;
}
const extraRows: any[] = extraConfigProcessor.parseForeignKeys(conversion, tableName);
const fullRows: any[] = (result.data || []).concat(extraRows); // Prevent failure if "result.data" is undefined.
await processForeignKeyWorker(conversion, dbAccess, tableName, fullRows);
log(conversion, `\t--[${ logTitle }] Foreign keys for table "${ conversion._schema }"."${ tableName }" are set...`);
});
await Promise.all(fkPromises);
}
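
To see the grouping at work: two information-schema rows that share one CONSTRAINT_NAME describe a composite key and collapse into a single statement (all values here are illustrative):

// Input rows for one composite foreign key on table "child":
const rows: any[] = [
    { CONSTRAINT_NAME: 'fk_ab', COLUMN_NAME: 'a', REFERENCED_TABLE_NAME: 'parent',
      REFERENCED_COLUMN_NAME: 'x', UPDATE_RULE: 'CASCADE', DELETE_RULE: 'RESTRICT' },
    { CONSTRAINT_NAME: 'fk_ab', COLUMN_NAME: 'b', REFERENCED_TABLE_NAME: 'parent',
      REFERENCED_COLUMN_NAME: 'y', UPDATE_RULE: 'CASCADE', DELETE_RULE: 'RESTRICT' }
];
// After the accumulation loop, objConstraints['fk_ab'] holds
//   column_name            = ['"a"', '"b"']
//   referenced_column_name = ['"x"', '"y"']
// and processForeignKeyWorker emits a single statement:
//   ALTER TABLE "public"."child"
//       ADD FOREIGN KEY ("a","b")
//       REFERENCES "public"."parent" ("x","y")
//       ON UPDATE CASCADE
//       ON DELETE RESTRICT;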

src/FsOps.ts Normal file

@@ -0,0 +1,172 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as fs from 'fs';
import * as path from 'path';
import Conversion from './Conversion';
/**
* Writes a detailed error message to the "/errors-only.log" file.
*/
export function generateError(conversion: Conversion, message: string, sql: string = ''): Promise<void> {
return new Promise<void>(resolve => {
message += `\n\n\tSQL: ${sql}\n\n`;
const buffer: Buffer = Buffer.from(message, conversion._encoding);
log(conversion, message, undefined, true);
fs.open(conversion._errorLogsPath, 'a', conversion._0777, (error: Error, fd: number) => {
if (error) {
return resolve();
}
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => resolve());
});
});
});
}
/**
* Outputs given log.
* Writes given log to the "/all.log" file.
* If necessary, writes given log to the "/{tableName}.log" file.
*/
export function log(conversion: Conversion, log: string | NodeJS.ErrnoException, tableLogPath?: string, isErrorLog?: boolean): void {
const buffer: Buffer = Buffer.from(`${ log }\n\n`, conversion._encoding);
if (!isErrorLog) {
console.log(log);
}
fs.open(conversion._allLogsPath, 'a', conversion._0777, (error: Error, fd: number) => {
if (!error) {
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
if (tableLogPath) {
fs.open(tableLogPath, 'a', conversion._0777, (error: Error, fd: number) => {
if (!error) {
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
// Each async function MUST have a callback (according to Node.js >= 7).
});
});
}
});
}
});
});
}
});
}
/**
* Reads the configuration file.
*/
export function readConfig(baseDir: string, configFileName: string = 'config.json'): Promise<any> {
return new Promise<any>(resolve => {
const strPathToConfig = path.join(baseDir, 'config', configFileName);
fs.readFile(strPathToConfig, (error: Error, data: Buffer) => {
if (error) {
console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ strPathToConfig }`);
process.exit();
}
const config: any = JSON.parse(data.toString());
config.logsDirPath = path.join(baseDir, 'logs_directory');
config.dataTypesMapAddr = path.join(baseDir, 'config', 'data_types_map.json');
resolve(config);
});
});
}
/**
* Reads the extra configuration file, if necessary.
*/
export function readExtraConfig(config: any, baseDir: string): Promise<any> {
return new Promise<any>(resolve => {
if (config.enable_extra_config !== true) {
config.extraConfig = null;
return resolve(config);
}
const strPathToExtraConfig = path.join(baseDir, 'config', 'extra_config.json');
fs.readFile(strPathToExtraConfig, (error: Error, data: Buffer) => {
if (error) {
console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ strPathToExtraConfig }`);
process.exit();
}
config.extraConfig = JSON.parse(data.toString());
resolve(config);
});
});
}
/**
* Creates logs directory.
*/
export function createLogsDirectory(conversion: Conversion): Promise<Conversion> {
return new Promise<Conversion>(resolve => {
const logTitle: string = 'FsOps::createLogsDirectory';
console.log(`\t--[${ logTitle }] Creating logs directory...`);
fs.stat(conversion._logsDirPath, (directoryDoesNotExist: Error, stat: fs.Stats) => {
if (directoryDoesNotExist) {
fs.mkdir(conversion._logsDirPath, conversion._0777, e => {
if (e) {
console.log(`\t--[${ logTitle }] Cannot perform a migration due to impossibility to create "logs_directory": ${ conversion._logsDirPath }`);
process.exit();
} else {
                        log(conversion, `\t--[${ logTitle }] Logs directory is created...`);
resolve(conversion);
}
});
} else if (!stat.isDirectory()) {
console.log(`\t--[${ logTitle }] Cannot perform a migration due to unexpected error`);
process.exit();
} else {
log(conversion, `\t--[${ logTitle }] Logs directory already exists...`);
resolve(conversion);
}
});
});
}
/**
* Reads "./config/data_types_map.json" and converts its json content to js object.
*/
export function readDataTypesMap(conversion: Conversion): Promise<Conversion> {
return new Promise<Conversion>(resolve => {
fs.readFile(conversion._dataTypesMapAddr, (error: Error, data: Buffer) => {
const logTitle: string = 'FsOps::readDataTypesMap';
if (error) {
console.log(`\t--[${ logTitle }] Cannot read "DataTypesMap" from ${conversion._dataTypesMapAddr}`);
process.exit();
}
conversion._dataTypesMap = JSON.parse(data.toString());
console.log(`\t--[${ logTitle }] Data Types Map is loaded...`);
resolve(conversion);
});
});
}
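
Typical bootstrap usage of the configuration helpers above. Whether the application's entry point wires them exactly this way is an assumption; only the helper signatures are taken from the code:

import * as path from 'path';
import { readConfig, readExtraConfig } from './FsOps';

// Resolve the project base directory, then load config.json and,
// if enable_extra_config is true, extra_config.json on top of it.
const baseDir: string = path.join(__dirname, '..');
readConfig(baseDir)
    .then((config: any) => readExtraConfig(config, baseDir))
    .then((config: any) => console.log(config.logsDirPath)); // <baseDir>/logs_directory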


@@ -1,124 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Create primary key and indices.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports = (self, tableName) => {
return connect(self).then(() => {
return new Promise(resolveProcessIndexAndKey => {
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[processIndexAndKey] Cannot connect to MySQL server...\n\t' + error);
resolveProcessIndexAndKey();
} else {
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
let sql = 'SHOW INDEX FROM `' + originalTableName + '`;';
connection.query(sql, (err, arrIndices) => {
connection.release();
if (err) {
generateError(self, '\t--[processIndexAndKey] ' + err, sql);
resolveProcessIndexAndKey();
} else {
const objPgIndices = Object.create(null);
const processIndexAndKeyPromises = [];
let cnt = 0;
let indexType = '';
for (let i = 0; i < arrIndices.length; ++i) {
const pgColumnName = extraConfigProcessor.getColumnName(self, originalTableName, arrIndices[i].Column_name, false);
if (arrIndices[i].Key_name in objPgIndices) {
objPgIndices[arrIndices[i].Key_name].column_name.push('"' + pgColumnName + '"');
} else {
objPgIndices[arrIndices[i].Key_name] = {
is_unique : arrIndices[i].Non_unique === 0 ? true : false,
column_name : ['"' + pgColumnName + '"'],
Index_type : ' USING ' + (arrIndices[i].Index_type === 'SPATIAL' ? 'GIST' : arrIndices[i].Index_type)
};
}
}
for (let attr in objPgIndices) {
processIndexAndKeyPromises.push(
new Promise(resolveProcessIndexAndKeySql => {
self._pg.connect((pgError, pgClient, done) => {
if (pgError) {
const msg = '\t--[processIndexAndKey] Cannot connect to PostgreSQL server...\n' + pgError;
generateError(self, msg);
resolveProcessIndexAndKeySql();
} else {
if (attr.toLowerCase() === 'primary') {
indexType = 'PK';
sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" '
+ 'ADD PRIMARY KEY(' + objPgIndices[attr].column_name.join(',') + ');';
} else {
// "schema_idxname_{integer}_idx" - is NOT a mistake.
const columnName = objPgIndices[attr].column_name[0].slice(1, -1) + cnt++;
indexType = 'index';
sql = 'CREATE ' + (objPgIndices[attr].is_unique ? 'UNIQUE ' : '') + 'INDEX "'
+ self._schema + '_' + tableName + '_' + columnName + '_idx" ON "'
+ self._schema + '"."' + tableName + '" '
+ objPgIndices[attr].Index_type + ' (' + objPgIndices[attr].column_name.join(',') + ');';
}
pgClient.query(sql, err2 => {
done();
if (err2) {
generateError(self, '\t--[processIndexAndKey] ' + err2, sql);
resolveProcessIndexAndKeySql();
} else {
resolveProcessIndexAndKeySql();
}
});
}
});
})
);
}
Promise.all(processIndexAndKeyPromises).then(() => {
const success = '\t--[processIndexAndKey] "' + self._schema + '"."' + tableName + '": PK/indices are successfully set...';
log(self, success, self._dicTables[tableName].tableLogPath);
resolveProcessIndexAndKey();
});
}
});
}
});
});
});
};


@ -0,0 +1,83 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Creates primary key and indices.
*/
export default async function(conversion: Conversion, tableName: string): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const logTitle: string = 'IndexAndKeyProcessor';
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const sqlShowIndex: string = `SHOW INDEX FROM \`${ originalTableName }\`;`;
const showIndexResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlShowIndex, DBVendors.MYSQL, false, false);
if (showIndexResult.error) {
return;
}
const objPgIndices: any = Object.create(null);
let cnt: number = 0;
showIndexResult.data.forEach((index: any) => {
const pgColumnName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, index.Column_name, false);
if (index.Key_name in objPgIndices) {
objPgIndices[index.Key_name].column_name.push(`"${ pgColumnName }"`);
return;
}
objPgIndices[index.Key_name] = {
is_unique: index.Non_unique === 0,
column_name: [`"${ pgColumnName }"`],
Index_type: ` USING ${ index.Index_type === 'SPATIAL' ? 'GIST' : index.Index_type }`
};
});
const addIndexPromises: Promise<void>[] = Object.keys(objPgIndices).map(async (index: string) => {
let sqlAddIndex: string = '';
if (index.toLowerCase() === 'primary') {
sqlAddIndex = `ALTER TABLE "${ conversion._schema }"."${ tableName }"
ADD PRIMARY KEY(${ objPgIndices[index].column_name.join(',') });`;
} else {
// "schema_idxname_{integer}_idx" - is NOT a mistake.
const columnName: string = objPgIndices[index].column_name[0].slice(1, -1) + cnt++;
sqlAddIndex = `CREATE ${ (objPgIndices[index].is_unique ? 'UNIQUE ' : '') }INDEX "${ conversion._schema }_${ tableName }_${ columnName }_idx"
ON "${ conversion._schema }"."${ tableName }"
${ objPgIndices[index].Index_type } (${ objPgIndices[index].column_name.join(',') });`;
}
await dbAccess.query(logTitle, sqlAddIndex, DBVendors.PG, false, false);
});
await Promise.all(addIndexPromises);
const successMsg: string = `\t--[${ logTitle }] "${ conversion._schema }"."${ tableName }": PK/indices are successfully set...`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
}
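
Note: a minimal usage sketch, assuming an initialized Conversion instance and a hypothetical, already-created target table 'orders':

import processIndexAndKey from './IndexAndKeyProcessor';

// Inside an async function: recreates the PK and indices of the MySQL table
// on its PostgreSQL counterpart.
await processIndexAndKey(conversion, 'orders');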


@ -1,63 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const fs = require('fs');
/**
* Outputs given log.
* Writes given log to the "/all.log" file.
* If necessary, writes given log to the "/{tableName}.log" file.
*
* @param {Conversion} self
* @param {String} log
* @param {String} tableLogPath
* @param {Boolean} isErrorLog
*
* @returns {undefined}
*/
module.exports = (self, log, tableLogPath, isErrorLog) => {
const buffer = Buffer.from(log + '\n\n', self._encoding);
if (!isErrorLog) {
console.log(log);
}
fs.open(self._allLogsPath, 'a', self._0777, (error, fd) => {
if (!error) {
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
if (tableLogPath) {
fs.open(tableLogPath, 'a', self._0777, (error, fd) => {
if (!error) {
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
// Each async function MUST have a callback (according to Node.js >= 7).
});
});
}
});
}
});
});
}
});
};


@ -1,159 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const fs = require('fs');
const path = require('path');
const readDataTypesMap = require('./DataTypesMapReader');
const Conversion = require('./Classes/Conversion');
const SchemaProcessor = require('./SchemaProcessor');
const loadStructureToMigrate = require('./StructureLoader');
const pipeData = require('./DataPipeManager');
const boot = require('./BootProcessor');
const { createStateLogsTable } = require('./MigrationStateManager');
const { createDataPoolTable, readDataPool } = require('./DataPoolManager');
const log = require('./Logger');
const Main = class {
/**
* Read the configuration file.
*
* @param {String} baseDir
* @param {String} configFileName
*
* @returns {Promise}
*/
readConfig(baseDir, configFileName = 'config.json') {
return new Promise(resolve => {
const strPathToConfig = path.join(baseDir, 'config', configFileName);
fs.readFile(strPathToConfig, (error, data) => {
if (error) {
console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ strPathToConfig }`);
process.exit();
}
const config = JSON.parse(data);
config.logsDirPath = path.join(baseDir, 'logs_directory');
config.dataTypesMapAddr = path.join(baseDir, 'config', 'data_types_map.json');
resolve(config);
});
});
}
/**
* Read the extra configuration file, if necessary.
*
* @param {Object} config
* @param {String} baseDir
*
* @returns {Promise}
*/
readExtraConfig(config, baseDir) {
return new Promise(resolve => {
if (config.enable_extra_config !== true) {
config.extraConfig = null;
return resolve(config);
}
const strPathToExtraConfig = path.join(baseDir, 'config', 'extra_config.json');
fs.readFile(strPathToExtraConfig, (error, data) => {
if (error) {
console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ strPathToExtraConfig }`);
process.exit();
}
config.extraConfig = JSON.parse(data);
resolve(config);
});
});
}
/**
* Initialize Conversion instance.
*
* @param {Object} config
*
* @returns {Promise}
*/
initializeConversion(config) {
return Promise.resolve(new Conversion(config));
}
/**
* Creates logs directory.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
createLogsDirectory(self) {
return new Promise(resolve => {
console.log('\t--[DirectoriesManager.createLogsDirectory] Creating logs directory...');
fs.stat(self._logsDirPath, (directoryDoesNotExist, stat) => {
if (directoryDoesNotExist) {
fs.mkdir(self._logsDirPath, self._0777, e => {
if (e) {
const msg = `\t--[DirectoriesManager.createLogsDirectory] Cannot perform a migration due to impossibility to create
"logs_directory": ${ self._logsDirPath }`;
console.log(msg);
process.exit();
} else {
log(self, '\t--[DirectoriesManager.createLogsDirectory] Logs directory is created...');
resolve(self);
}
});
} else if (!stat.isDirectory()) {
console.log('\t--[DirectoriesManager.createLogsDirectory] Cannot perform a migration due to unexpected error');
process.exit();
} else {
log(self, '\t--[DirectoriesManager.createLogsDirectory] Logs directory already exists...');
resolve(self);
}
});
});
}
};
module.exports = Main;
const app = new Main();
const baseDir = path.join(__dirname, '..');
app.readConfig(baseDir)
.then(config => {
return app.readExtraConfig(config, baseDir);
})
.then(app.initializeConversion)
.then(boot)
.then(readDataTypesMap)
.then(app.createLogsDirectory)
.then(conversion => {
return (new SchemaProcessor(conversion)).createSchema();
})
.then(createStateLogsTable)
.then(createDataPoolTable)
.then(loadStructureToMigrate)
.then(readDataPool)
.then(pipeData)
.catch(error => console.log(error));

src/Main.ts

@ -0,0 +1,45 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as path from 'path';
import Conversion from './Conversion';
import createSchema from './SchemaProcessor';
import loadStructureToMigrate from './StructureLoader';
import pipeData from './DataPipeManager';
import { boot } from './BootProcessor';
import { createStateLogsTable } from './MigrationStateManager';
import { createDataPoolTable, readDataPool } from './DataPoolManager';
import { readConfig, readExtraConfig, createLogsDirectory, readDataTypesMap } from './FsOps';
const baseDir: string = path.join(__dirname, '..', '..');
readConfig(baseDir)
.then(config => readExtraConfig(config, baseDir))
.then(Conversion.initializeConversion)
.then(boot)
.then(readDataTypesMap)
.then(createLogsDirectory)
.then(createSchema)
.then(createStateLogsTable)
.then(createDataPoolTable)
.then(loadStructureToMigrate)
.then(readDataPool)
.then(pipeData)
.catch(error => console.log(error));


@ -18,19 +18,23 @@
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
-'use strict';
+export default class MessageToDataLoader {
+    /**
+     * Parsed Nmig's configuration object.
+     */
+    public readonly config: any;
+    /**
+     * An array of data chunks.
+     */
+    public readonly chunks: any[];
-module.exports = class MessageToDataLoader {
     /**
-     * Representation of a message of the master process to DataLoader process.
-     * Contents migration's configuration and an array of "data-chunks".
+     * Constructor.
      *
-     * @param {Object} config
-     * @param {Array} chunks
+     * Contains migration's configuration and an array of "data-chunks".
      */
-    constructor(config, chunks) {
+    public constructor(config: any, chunks: any[]) {
         this.config = config;
         this.chunks = chunks;
     }
-};
+}


@ -18,21 +18,29 @@
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
-'use strict';
+export default class MessageToMaster {
+    /**
+     * A name of a table, to insert the data into.
+     */
+    public readonly tableName: string;
+    /**
+     * A number of rows, that have already been inserted into given table.
+     */
+    public rowsInserted: number;
+    /**
+     * A number of rows to insert into given table.
+     */
+    public readonly totalRowsToInsert: number;
-module.exports = class MessageToMaster {
     /**
-     * Representation of a message of DataLoader process to the master process regarding records,
-     * inserted to specified table.
+     * Constructor.
      *
-     * @param {String} tableName
-     * @param {Number} rowsInserted
-     * @param {Number} totalRowsToInsert
      */
-    constructor(tableName, rowsInserted, totalRowsToInsert) {
-        this.tableName = tableName;
-        this.rowsInserted = rowsInserted;
+    public constructor(tableName: string, rowsInserted: number, totalRowsToInsert: number) {
+        this.tableName = tableName;
+        this.rowsInserted = rowsInserted;
         this.totalRowsToInsert = totalRowsToInsert;
     }
-};
+}


@ -1,195 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
/**
* Get state-log.
*
* @param {Conversion} self
* @param {String} param
*
* @returns {Promise}
*/
module.exports.get = (self, param) => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[MigrationStateManager.get] Cannot connect to PostgreSQL server...\n' + error);
resolve(false);
} else {
const sql = 'SELECT ' + param + ' FROM "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName + '";';
client.query(sql, (err, data) => {
done();
if (err) {
generateError(self, '\t--[MigrationStateManager.get] ' + err, sql);
resolve(false);
} else {
resolve(data.rows[0][param]);
}
});
}
});
});
});
};
/**
* Update the state-log.
*
* @param {Conversion} self
* @param {String} param
*
* @returns {Promise}
*/
module.exports.set = (self, param) => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[MigrationStateManager.set] Cannot connect to PostgreSQL server...\n' + error);
resolve();
} else {
const sql = 'UPDATE "' + self._schema + '"."state_logs_'
+ self._schema + self._mySqlDbName + '" SET ' + param + ' = TRUE;';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[MigrationStateManager.set] ' + err, sql);
}
resolve();
});
}
});
});
});
};
/**
* Create the "{schema}"."state_logs_{self._schema + self._mySqlDbName} temporary table."
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports.createStateLogsTable = self => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[createStateLogsTable] Cannot connect to PostgreSQL server...\n' + error);
process.exit();
} else {
let sql = 'CREATE TABLE IF NOT EXISTS "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName
+ '"('
+ '"tables_loaded" BOOLEAN,'
+ '"per_table_constraints_loaded" BOOLEAN,'
+ '"foreign_keys_loaded" BOOLEAN,'
+ '"views_loaded" BOOLEAN'
+ ');';
client.query(sql, err => {
if (err) {
done();
generateError(self, '\t--[createStateLogsTable] ' + err, sql);
process.exit();
} else {
sql = 'SELECT COUNT(1) AS cnt FROM "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName + '";';
client.query(sql, (errorCount, result) => {
if (errorCount) {
done();
generateError(self, '\t--[createStateLogsTable] ' + errorCount, sql);
process.exit();
} else if (+result.rows[0].cnt === 0) {
sql = 'INSERT INTO "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName
+ '" VALUES(FALSE, FALSE, FALSE, FALSE);';
client.query(sql, errorInsert => {
done();
if (errorInsert) {
generateError(self, '\t--[createStateLogsTable] ' + errorInsert, sql);
process.exit();
} else {
const msg = '\t--[createStateLogsTable] table "' + self._schema + '"."state_logs_'
+ self._schema + self._mySqlDbName + '" is created...';
log(self, msg);
resolve(self);
}
});
} else {
const msg2 = '\t--[createStateLogsTable] table "' + self._schema + '"."state_logs_'
+ self._schema + self._mySqlDbName + '" is created...';
log(self, msg2);
resolve(self);
}
});
}
});
}
});
});
});
};
/**
* Drop the "{schema}"."state_logs_{self._schema + self._mySqlDbName} temporary table."
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports.dropStateLogsTable = self => {
return connect(self).then(() => {
return new Promise(resolve => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[dropStateLogsTable] Cannot connect to PostgreSQL server...\n' + error);
resolve();
} else {
const sql = 'DROP TABLE "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName + '";';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[dropStateLogsTable] ' + err, sql);
} else {
log(self, '\t--[dropStateLogsTable] table "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName + '" is dropped...');
}
resolve();
});
}
});
});
});
};


@ -0,0 +1,78 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
import { log } from './FsOps';
import Conversion from './Conversion';
/**
* Retrieves state-log.
*/
export async function get(conversion: Conversion, param: string): Promise<boolean> {
const dbAccess: DBAccess = new DBAccess(conversion);
const sql: string = `SELECT ${ param } FROM "${ conversion._schema }"."state_logs_${ conversion._schema }${ conversion._mySqlDbName }";`;
const result: DBAccessQueryResult = await dbAccess.query('MigrationStateManager::get', sql, DBVendors.PG, true, false);
return result.data.rows[0][param];
}
/**
* Updates the state-log.
*/
export async function set(conversion: Conversion, param: string): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const sql: string = `UPDATE "${ conversion._schema }"."state_logs_${ conversion._schema }${ conversion._mySqlDbName }" SET ${ param } = TRUE;`;
await dbAccess.query('MigrationStateManager::set', sql, DBVendors.PG, true, false);
}
/**
* Creates the "{schema}"."state_logs_{self._schema + self._mySqlDbName}" temporary table.
*/
export async function createStateLogsTable(conversion: Conversion): Promise<Conversion> {
const dbAccess: DBAccess = new DBAccess(conversion);
let sql: string = `CREATE TABLE IF NOT EXISTS "${ conversion._schema }"."state_logs_${ conversion._schema }${ conversion._mySqlDbName }"(
"tables_loaded" BOOLEAN, "per_table_constraints_loaded" BOOLEAN, "foreign_keys_loaded" BOOLEAN, "views_loaded" BOOLEAN);`;
let result: DBAccessQueryResult = await dbAccess.query('MigrationStateManager::createStateLogsTable', sql, DBVendors.PG, true, true);
sql = `SELECT COUNT(1) AS cnt FROM "${ conversion._schema }"."state_logs_${ conversion._schema }${ conversion._mySqlDbName }";`;
result = await dbAccess.query('MigrationStateManager::createStateLogsTable', sql, DBVendors.PG, true, true, result.client);
if (+result.data.rows[0].cnt === 0) {
sql = `INSERT INTO "${ conversion._schema }"."state_logs_${ conversion._schema }${ conversion._mySqlDbName }" VALUES (FALSE, FALSE, FALSE, FALSE);`;
await dbAccess.query('MigrationStateManager::createStateLogsTable', sql, DBVendors.PG, true, false, result.client);
return conversion;
}
const msg: string = `\t--[MigrationStateManager::createStateLogsTable] table ` +
    `"${ conversion._schema }"."state_logs_${ conversion._schema }${ conversion._mySqlDbName }" is created...`;
log(conversion, msg);
return conversion;
}
/**
 * Drops the "{schema}"."state_logs_{self._schema + self._mySqlDbName}" temporary table.
*/
export async function dropStateLogsTable(conversion: Conversion): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const sql: string = `DROP TABLE "${ conversion._schema }"."state_logs_${ conversion._schema }${ conversion._mySqlDbName }";`;
await dbAccess.query('MigrationStateManager::dropStateLogsTable', sql, DBVendors.PG, false, false);
}
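
Note: a short usage sketch, assuming an initialized Conversion instance; 'tables_loaded' is one of the four boolean columns created by createStateLogsTable above:

import * as migrationStateManager from './MigrationStateManager';

// Inside an async function: skip work that a previously interrupted run has completed.
const haveTablesLoaded: boolean = await migrationStateManager.get(conversion, 'tables_loaded');

if (!haveTablesLoaded) {
    // ... reload the structure, then persist the new state.
    await migrationStateManager.set(conversion, 'tables_loaded');
}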


@ -1,91 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Define which columns of the given table can contain the "NULL" value.
* Set an appropriate constraint, if need.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports = (self, tableName) => {
return connect(self).then(() => {
return new Promise(resolve => {
log(self, '\t--[processNull] Defines "NOT NULLs" for table: "' + self._schema + '"."' + tableName + '"', self._dicTables[tableName].tableLogPath);
const processNullPromises = [];
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
if (self._dicTables[tableName].arrTableColumns[i].Null.toLowerCase() === 'no') {
processNullPromises.push(
new Promise(resolveProcessNull => {
self._pg.connect((error, client, done) => {
if (error) {
const msg = '\t--[processNull] Cannot connect to PostgreSQL server...\n' + error;
generateError(self, msg);
resolveProcessNull();
} else {
const columnName = extraConfigProcessor.getColumnName(
self,
originalTableName,
self._dicTables[tableName].arrTableColumns[i].Field,
false
);
const sql = 'ALTER TABLE "' + self._schema + '"."' + tableName
+ '" ALTER COLUMN "' + columnName + '" SET NOT NULL;';
client.query(sql, err => {
done();
if (err) {
const msg2 = '\t--[processNull] Error while setting NOT NULL for "' + self._schema + '"."'
+ tableName + '"."' + columnName + '"...\n' + err;
generateError(self, msg2, sql);
resolveProcessNull();
} else {
const success = '\t--[processNull] Set NOT NULL for "' + self._schema + '"."' + tableName
+ '"."' + columnName + '"...';
log(self, success, self._dicTables[tableName].tableLogPath);
resolveProcessNull();
}
});
}
});
})
);
}
}
Promise.all(processNullPromises).then(() => resolve());
});
});
};

src/NullProcessor.ts

@ -0,0 +1,52 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Defines which columns of the given table can contain the "NULL" value.
 * Sets an appropriate constraint, if needed.
*/
export default async function(conversion: Conversion, tableName: string): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const msg: string = `\t--[NullConstraintsProcessor] Defines "NOT NULLs" for table: "${ conversion._schema }"."${ tableName }"`;
log(conversion, msg, conversion._dicTables[tableName].tableLogPath);
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const promises: Promise<void>[] = conversion._dicTables[tableName].arrTableColumns.map(async (column: any) => {
if (column.Null.toLowerCase() === 'no') {
const columnName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, column.Field, false);
const sql: string = `ALTER TABLE "${ conversion._schema }"."${ tableName }" ALTER COLUMN "${ columnName }" SET NOT NULL;`;
const result: DBAccessQueryResult = await dbAccess.query('NullConstraintsProcessor', sql, DBVendors.PG, false, false);
if (!result.error) {
const successMsg: string = `\t--[NullConstraintsProcessor] Set NOT NULL for "${ conversion._schema }"."${ tableName }"."${ columnName }"...`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
}
}
});
await Promise.all(promises);
}
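
Note: for a hypothetical column "email" of table "users" that was NOT NULL in MySQL, the statement built above comes out as follows (the schema name 'public' is also assumed):

const schema: string = 'public';       // assumed conversion._schema
const tableName: string = 'users';     // hypothetical table
const columnName: string = 'email';    // hypothetical column with Null === 'NO'
const sql: string = `ALTER TABLE "${ schema }"."${ tableName }" ALTER COLUMN "${ columnName }" SET NOT NULL;`;
// => ALTER TABLE "public"."users" ALTER COLUMN "email" SET NOT NULL;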


@ -1,54 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const log = require('./Logger');
/**
* Generates a summary report.
*
* @param {Conversion} self
* @param {String} endMsg
*
* @returns {undefined}
*/
module.exports = (self, endMsg) => {
let differenceSec = ((new Date()) - self._timeBegin) / 1000;
let seconds = Math.floor(differenceSec % 60);
differenceSec = differenceSec / 60;
let minutes = Math.floor(differenceSec % 60);
let hours = Math.floor(differenceSec / 60);
hours = hours < 10 ? '0' + hours : hours;
minutes = minutes < 10 ? '0' + minutes : minutes;
seconds = seconds < 10 ? '0' + seconds : seconds;
const output = '\t--[generateReport] ' + endMsg
+ '\n\t--[generateReport] Total time: ' + hours + ':' + minutes + ':' + seconds
+ '\n\t--[generateReport] (hours:minutes:seconds)';
log(self, output);
if (self._runsInTestMode) {
self._eventEmitter.emit(self._migrationCompletedEvent);
return;
}
process.exit();
};

src/ReportGenerator.ts

@ -0,0 +1,49 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import { EventEmitter } from 'events';
/**
* Generates a summary report.
*/
export default (conversion: Conversion, endMsg: string): void => {
let differenceSec: number = ((new Date()).getTime() - conversion._timeBegin.getTime()) / 1000;
const seconds: number = Math.floor(differenceSec % 60);
differenceSec = differenceSec / 60;
const minutes: number = Math.floor(differenceSec % 60);
const hours: number = Math.floor(differenceSec / 60);
const formattedHours: string = hours < 10 ? `0${ hours }` : `${ hours }`;
const formattedMinutes: string = minutes < 10 ? `0${ minutes }` : `${ minutes }`;
const formattedSeconds: string = seconds < 10 ? `0${ seconds }` : `${ seconds }`;
    const output: string = `\t--[generateReport] ${ endMsg }` +
        `\n\t--[generateReport] Total time: ${ formattedHours }:${ formattedMinutes }:${ formattedSeconds }` +
        `\n\t--[generateReport] (hours:minutes:seconds)`;
log(conversion, output);
if (conversion._runsInTestMode) {
(<EventEmitter>conversion._eventEmitter).emit(conversion._migrationCompletedEvent);
return;
}
process.exit();
}
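
Note: a worked example of the arithmetic above, for a hypothetical run of 3725 seconds:

let differenceSec: number = 3725;                          // hypothetical duration
const seconds: number = Math.floor(differenceSec % 60);    // 5
differenceSec = differenceSec / 60;                        // 62.083...
const minutes: number = Math.floor(differenceSec % 60);    // 2
const hours: number = Math.floor(differenceSec / 60);      // 1
// After zero-padding, the report prints: Total time: 01:02:05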


@ -1,63 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const ConnectionEmitter = require('./ConnectionEmitter');
const generateError = require('./ErrorGenerator');
module.exports = class SchemaProcessor {
/**
* SchemaProcessor constructor.
*
* @param {Conversion} conversion
*/
constructor(conversion) {
this._conversion = conversion;
this._connectionEmitter = new ConnectionEmitter(this._conversion);
}
/**
* Create a new database schema if it does not exist yet.
*
* @returns {Promise<Conversion>}
*/
async createSchema() {
const client = await this._connectionEmitter.getPgClient();
let sql = `SELECT schema_name FROM information_schema.schemata WHERE schema_name = '${ this._conversion._schema }';`;
try {
const result = await client.query(sql);
if (result.rows.length === 0) {
sql = `CREATE SCHEMA "${ this._conversion._schema }";`;
await client.query(sql);
}
this._connectionEmitter.releasePgClient(client);
return Promise.resolve(this._conversion);
} catch (err) {
generateError(this._conversion, `\t--[createSchema] ${ err }`, sql);
process.exit();
}
}
};

src/SchemaProcessor.ts

@ -0,0 +1,41 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
/**
* Creates a new PostgreSQL schema if it does not exist yet.
*/
export default async function(conversion: Conversion): Promise<Conversion> {
const logTitle: string = 'SchemaProcessor::createSchema';
let sql: string = `SELECT schema_name FROM information_schema.schemata WHERE schema_name = '${ conversion._schema }';`;
const dbAccess: DBAccess = new DBAccess(conversion);
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sql, DBVendors.PG, true, true);
if (result.data.rows.length === 0) {
sql = `CREATE SCHEMA "${ conversion._schema }";`;
await dbAccess.query(logTitle, sql, DBVendors.PG, true, false, result.client);
}
return conversion;
}
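
Note: the second query deliberately reuses result.client, so the existence check and the CREATE SCHEMA run on the same pooled connection. As a hedged aside, PostgreSQL (9.3+) could collapse both round-trips into a single idempotent statement; "my_schema" below stands in for conversion._schema:

const sqlCreateIfAbsent: string = `CREATE SCHEMA IF NOT EXISTS "my_schema";`;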


@ -1,199 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Set sequence value.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports.setSequenceValue = (self, tableName) => {
return connect(self).then(() => {
return new Promise(resolve => {
let hasAutoIncrementColumnFound = false;
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
if (self._dicTables[tableName].arrTableColumns[i].Extra === 'auto_increment') {
hasAutoIncrementColumnFound = true;
self._pg.connect((error, client, done) => {
if (error) {
const msg = '\t--[setSequenceValue] Cannot connect to PostgreSQL server...\n' + error;
generateError(self, msg);
resolve();
} else {
const columnName = extraConfigProcessor.getColumnName(
self,
originalTableName,
self._dicTables[tableName].arrTableColumns[i].Field,
false
);
const seqName = tableName + '_' + columnName + '_seq';
const sql = 'SELECT SETVAL(\'"' + self._schema + '"."' + seqName + '"\', '
+ '(SELECT MAX("' + columnName + '") FROM "'
+ self._schema + '"."' + tableName + '"));';
client.query(sql, err => {
done();
if (err) {
const errMsg = '\t--[setSequenceValue] Failed to set max-value of "' + self._schema + '"."'
+ tableName + '"."' + columnName + '" '
+ 'as the "NEXTVAL of "' + self._schema + '"."' + seqName + '"...';
generateError(self, errMsg, sql);
resolve();
} else {
const success = '\t--[setSequenceValue] Sequence "' + self._schema + '"."' + seqName + '" is created...';
log(self, success, self._dicTables[tableName].tableLogPath);
resolve();
}
});
}
});
break; // The AUTO_INCREMENTed column was just processed.
}
}
if (!hasAutoIncrementColumnFound) {
resolve();
}
});
});
}
/**
* Define which column in given table has the "auto_increment" attribute.
* Create an appropriate sequence.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports.createSequence = (self, tableName) => {
return connect(self).then(() => {
return new Promise(resolve => {
const createSequencePromises = [];
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
if (self._dicTables[tableName].arrTableColumns[i].Extra === 'auto_increment') {
createSequencePromises.push(
new Promise(resolveCreateSequence => {
const columnName = extraConfigProcessor.getColumnName(
self,
originalTableName,
self._dicTables[tableName].arrTableColumns[i].Field,
false
);
const seqName = tableName + '_' + columnName + '_seq';
log(self, '\t--[createSequence] Trying to create sequence : "' + self._schema + '"."' + seqName + '"', self._dicTables[tableName].tableLogPath);
self._pg.connect((error, client, done) => {
if (error) {
const msg = '\t--[createSequence] Cannot connect to PostgreSQL server...\n' + error;
generateError(self, msg);
resolveCreateSequence();
} else {
let sql = 'CREATE SEQUENCE "' + self._schema + '"."' + seqName + '";';
client.query(sql, err => {
if (err) {
done();
const errMsg = '\t--[createSequence] Failed to create sequence "' + self._schema + '"."' + seqName + '"';
generateError(self, errMsg, sql);
resolveCreateSequence();
} else {
sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" '
+ 'ALTER COLUMN "' + columnName + '" '
+ 'SET DEFAULT NEXTVAL(\'"' + self._schema + '"."' + seqName + '"\');';
client.query(sql, err2 => {
if (err2) {
done();
const err2Msg = '\t--[createSequence] Failed to set default value for "' + self._schema + '"."'
+ tableName + '"."' + columnName + '"...'
+ '\n\t--[createSequence] Note: sequence "' + self._schema + '"."' + seqName + '" was created...';
generateError(self, err2Msg, sql);
resolveCreateSequence();
} else {
sql = 'ALTER SEQUENCE "' + self._schema + '"."' + seqName + '" '
+ 'OWNED BY "' + self._schema + '"."' + tableName + '"."' + columnName + '";';
client.query(sql, err3 => {
if (err3) {
done();
const err3Msg = '\t--[createSequence] Failed to relate sequence "' + self._schema + '"."' + seqName + '" to '
+ '"' + self._schema + '"."' + tableName + '"."' + columnName + '"...';
generateError(self, err3Msg, sql);
resolveCreateSequence();
} else {
sql = 'SELECT SETVAL(\'"' + self._schema + '"."' + seqName + '"\', '
+ '(SELECT MAX("' + columnName + '") FROM "'
+ self._schema + '"."' + tableName + '"));';
client.query(sql, err4 => {
done();
if (err4) {
const err4Msg = '\t--[createSequence] Failed to set max-value of "' + self._schema + '"."'
+ tableName + '"."' + columnName + '" '
+ 'as the "NEXTVAL of "' + self._schema + '"."' + seqName + '"...';
generateError(self, err4Msg, sql);
resolveCreateSequence();
} else {
const success = '\t--[createSequence] Sequence "' + self._schema + '"."' + seqName + '" is created...';
log(self, success, self._dicTables[tableName].tableLogPath);
resolveCreateSequence();
}
});
}
});
}
});
}
});
}
});
})
);
break; // The AUTO_INCREMENTed column was just processed.
}
}
Promise.all(createSequencePromises).then(() => resolve());
});
});
};

src/SequencesProcessor.ts

@ -0,0 +1,108 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
import { PoolClient } from 'pg';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Sets sequence value.
*/
export async function setSequenceValue(conversion: Conversion, tableName: string): Promise<void> {
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const autoIncrementedColumn: any = conversion._dicTables[tableName].arrTableColumns.find((column: any) => column.Extra === 'auto_increment');
if (!autoIncrementedColumn) {
// No auto-incremented column found.
return;
}
const dbAccess: DBAccess = new DBAccess(conversion);
const columnName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, autoIncrementedColumn.Field, false);
const seqName: string = `${ tableName }_${ columnName }_seq`;
const sql: string = `SELECT SETVAL(\'"${ conversion._schema }"."${ seqName }"\',
(SELECT MAX("${ columnName }") FROM "${ conversion._schema }"."${ tableName }"));`;
const result: DBAccessQueryResult = await dbAccess.query('SequencesProcessor::setSequenceValue', sql, DBVendors.PG, false, false);
if (!result.error) {
const successMsg: string = `\t--[setSequenceValue] Sequence "${ conversion._schema }"."${ seqName }" value is set...`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
}
}
/**
* Defines which column in given table has the "auto_increment" attribute.
* Creates an appropriate sequence.
*/
export async function createSequence(conversion: Conversion, tableName: string): Promise<void> {
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const autoIncrementedColumn: any = conversion._dicTables[tableName].arrTableColumns.find((column: any) => column.Extra === 'auto_increment');
if (!autoIncrementedColumn) {
// No auto-incremented column found.
return;
}
const columnName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, autoIncrementedColumn.Field, false);
const logTitle: string = 'SequencesProcessor::createSequence';
const dbAccess: DBAccess = new DBAccess(conversion);
const seqName: string = `${ tableName }_${ columnName }_seq`;
const sqlCreateSequence: string = `CREATE SEQUENCE "${ conversion._schema }"."${ seqName }";`;
const createSequenceResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlCreateSequence, DBVendors.PG, false, true);
if (createSequenceResult.error) {
dbAccess.releaseDbClient(<PoolClient>createSequenceResult.client);
return;
}
const sqlSetNextVal: string = `ALTER TABLE "${ conversion._schema }"."${ tableName }" ALTER COLUMN "${ columnName }"
SET DEFAULT NEXTVAL('"${ conversion._schema }"."${ seqName }"');`;
const setNextValResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlSetNextVal, DBVendors.PG, false, true, createSequenceResult.client);
if (setNextValResult.error) {
dbAccess.releaseDbClient(<PoolClient>setNextValResult.client);
return;
}
const sqlSetSequenceOwner: string = `ALTER SEQUENCE "${ conversion._schema }"."${ seqName }" OWNED BY "${ conversion._schema }"."${ tableName }"."${ columnName }";`;
const setSequenceOwnerResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlSetSequenceOwner, DBVendors.PG, false, true, setNextValResult.client);
if (setSequenceOwnerResult.error) {
dbAccess.releaseDbClient(<PoolClient>setSequenceOwnerResult.client);
return;
}
const sqlSetSequenceValue: string = `SELECT SETVAL(\'"${ conversion._schema }"."${ seqName }"\', (SELECT MAX("${ columnName }") FROM "${ conversion._schema }"."${ tableName }"));`;
const sqlSetSequenceValueResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlSetSequenceValue, DBVendors.PG, false, false, setSequenceOwnerResult.client);
if (!sqlSetSequenceValueResult.error) {
const successMsg: string = `\t--[${ logTitle }] Sequence "${ conversion._schema }"."${ seqName }" is created...`;
log(conversion, successMsg, conversion._dicTables[tableName].tableLogPath);
}
}
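
Note: spelled out for a hypothetical auto-incremented column "id" of table "orders" in schema "public", createSequence issues these statements in order (all on one pooled client):

const schema: string = 'public';      // assumed conversion._schema
const tableName: string = 'orders';   // hypothetical table
const columnName: string = 'id';      // hypothetical auto_increment column
const seqName: string = `${ tableName }_${ columnName }_seq`;   // orders_id_seq
const statements: string[] = [
    `CREATE SEQUENCE "${ schema }"."${ seqName }";`,
    `ALTER TABLE "${ schema }"."${ tableName }" ALTER COLUMN "${ columnName }" SET DEFAULT NEXTVAL('"${ schema }"."${ seqName }"');`,
    `ALTER SEQUENCE "${ schema }"."${ seqName }" OWNED BY "${ schema }"."${ tableName }"."${ columnName }";`,
    `SELECT SETVAL('"${ schema }"."${ seqName }"', (SELECT MAX("${ columnName }") FROM "${ schema }"."${ tableName }"));`
];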


@ -1,152 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const Table = require('./Classes/Table');
const { createTable } = require('./TableProcessor');
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const prepareDataChunks = require('./DataChunksProcessor');
const migrationStateManager = require('./MigrationStateManager');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Processes current table before data loading.
*
* @param {Conversion} self
* @param {String} tableName
* @param {Boolean} stateLog
*
* @returns {Promise}
*/
const processTableBeforeDataLoading = (self, tableName, stateLog) => {
return connect(self).then(() => {
return createTable(self, tableName);
}).then(() => {
return prepareDataChunks(self, tableName, stateLog);
}).catch(() => {
generateError(self, '\t--[processTableBeforeDataLoading] Cannot create table "' + self._schema + '"."' + tableName + '"...');
});
}
/**
* Get the MySQL version.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
const getMySqlVersion = self => {
return connect(self).then(() => {
return new Promise(resolve => {
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[getMySqlVersion] Cannot connect to MySQL server...\n' + error);
resolve();
} else {
const sql = 'SELECT VERSION() AS mysql_version;';
connection.query(sql, (err, rows) => {
connection.release();
if (err) {
generateError(self, '\t--[getMySqlVersion] ' + err, sql);
resolve();
} else {
const arrVersion = rows[0].mysql_version.split('.');
const majorVersion = arrVersion[0];
const minorVersion = arrVersion.slice(1).join('');
self._mysqlVersion = +(majorVersion + '.' + minorVersion);
resolve();
}
});
}
});
});
});
}
/**
* Load source tables and views, that need to be migrated.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports = self => {
return getMySqlVersion(self).then(() => {
return migrationStateManager.get(self, 'tables_loaded').then(haveTablesLoaded => {
return new Promise(resolve => {
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[loadStructureToMigrate] Cannot connect to MySQL server...\n' + error);
process.exit();
} else {
const sql = 'SHOW FULL TABLES IN `' + self._mySqlDbName + '`;';
connection.query(sql, (strErr, rows) => {
connection.release();
if (strErr) {
generateError(self, '\t--[loadStructureToMigrate] ' + strErr, sql);
process.exit();
} else {
let tablesCnt = 0;
let viewsCnt = 0;
const processTablePromises = [];
for (let i = 0; i < rows.length; ++i) {
let relationName = rows[i]['Tables_in_' + self._mySqlDbName];
if (rows[i].Table_type === 'BASE TABLE' && self._excludeTables.indexOf(relationName) === -1) {
relationName = extraConfigProcessor.getTableName(self, relationName, false);
self._tablesToMigrate.push(relationName);
self._dicTables[relationName] = new Table(self._logsDirPath + '/' + relationName + '.log');
processTablePromises.push(processTableBeforeDataLoading(self, relationName, haveTablesLoaded));
tablesCnt++;
} else if (rows[i].Table_type === 'VIEW') {
self._viewsToMigrate.push(relationName);
viewsCnt++;
}
}
rows = null;
let message = '\t--[loadStructureToMigrate] Source DB structure is loaded...\n'
+ '\t--[loadStructureToMigrate] Tables to migrate: ' + tablesCnt + '\n'
+ '\t--[loadStructureToMigrate] Views to migrate: ' + viewsCnt;
log(self, message);
Promise.all(processTablePromises).then(
() => {
migrationStateManager.set(self, 'tables_loaded').then(() => resolve(self));
},
() => process.exit()
);
}
});
}
});
});
});
});
};

src/StructureLoader.ts

@ -0,0 +1,94 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
import { log } from './FsOps';
import Conversion from './Conversion';
import Table from './Table';
import { createTable } from './TableProcessor';
import prepareDataChunks from './DataChunksProcessor';
import * as migrationStateManager from './MigrationStateManager';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Processes current table before data loading.
*/
async function processTableBeforeDataLoading(conversion: Conversion, tableName: string, stateLog: boolean): Promise<void> {
await createTable(conversion, tableName);
await prepareDataChunks(conversion, tableName, stateLog);
}
/**
* Retrieves the source db (MySQL) version.
*/
async function getMySqlVersion(conversion: Conversion): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const sql: string = 'SELECT VERSION() AS mysql_version;';
const result: DBAccessQueryResult = await dbAccess.query('StructureLoader::getMySqlVersion', sql, DBVendors.MYSQL, false, false);
if (result.error) {
return;
}
const arrVersion: string[] = result.data[0].mysql_version.split('.');
const majorVersion: string = arrVersion[0];
const minorVersion: string = arrVersion.slice(1).join('');
conversion._mysqlVersion = +(`${ majorVersion }.${ minorVersion }`);
}
/**
 * Loads the source tables and views that need to be migrated.
*/
export default async (conversion: Conversion): Promise<Conversion> => {
await getMySqlVersion(conversion);
const dbAccess: DBAccess = new DBAccess(conversion);
const haveTablesLoaded: boolean = await migrationStateManager.get(conversion, 'tables_loaded');
const sql: string = `SHOW FULL TABLES IN \`${ conversion._mySqlDbName }\`;`;
const result: DBAccessQueryResult = await dbAccess.query('StructureLoader::default', sql, DBVendors.MYSQL, true, false);
let tablesCnt: number = 0;
let viewsCnt: number = 0;
const processTablePromises: Promise<void>[] = [];
result.data.forEach((row: any) => {
let relationName: string = row[`Tables_in_${ conversion._mySqlDbName }`];
if (row.Table_type === 'BASE TABLE' && conversion._excludeTables.indexOf(relationName) === -1) {
relationName = extraConfigProcessor.getTableName(conversion, relationName, false);
conversion._tablesToMigrate.push(relationName);
conversion._dicTables[relationName] = new Table(`${ conversion._logsDirPath }/${ relationName }.log`);
processTablePromises.push(processTableBeforeDataLoading(conversion, relationName, haveTablesLoaded));
tablesCnt++;
} else if (row.Table_type === 'VIEW') {
conversion._viewsToMigrate.push(relationName);
viewsCnt++;
}
});
    const message: string = `\t--[loadStructureToMigrate] Source DB structure is loaded...\n` +
        `\t--[loadStructureToMigrate] Tables to migrate: ${ tablesCnt }\n` +
        `\t--[loadStructureToMigrate] Views to migrate: ${ viewsCnt }`;
log(conversion, message);
await Promise.all(processTablePromises);
await migrationStateManager.set(conversion, 'tables_loaded');
return conversion;
}
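
Note: a worked example of getMySqlVersion's parsing, for a hypothetical server where VERSION() returns '5.7.23' — everything after the major version is concatenated into a single fractional part:

const arrVersion: string[] = '5.7.23'.split('.');            // ['5', '7', '23']
const majorVersion: string = arrVersion[0];                  // '5'
const minorVersion: string = arrVersion.slice(1).join('');   // '723'
const mysqlVersion: number = +(`${ majorVersion }.${ minorVersion }`);   // 5.723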

src/Table.ts

@ -0,0 +1,45 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
export default class Table {
/**
* The path to current table's log file.
*/
public readonly tableLogPath: string;
/**
* Representation of given table's columns metadata.
*/
public readonly arrTableColumns: any[];
/**
* Total rows inserted into given table.
*/
public totalRowsInserted: number;
/**
* Constructor.
*/
public constructor(tableLogPath: string) {
this.tableLogPath = tableLogPath;
this.arrTableColumns = [];
this.totalRowsInserted = 0;
}
}


@ -1,155 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Converts MySQL data types to corresponding PostgreSQL data types.
* This conversion performs in accordance to mapping rules in './config/data_types_map.json'.
* './config/data_types_map.json' can be customized.
*
* @param {Object} objDataTypesMap
* @param {String} mySqlDataType
*
* @returns {String}
*/
const mapDataTypes = (objDataTypesMap, mySqlDataType) => {
let retVal = '';
let arrDataTypeDetails = mySqlDataType.split(' ');
mySqlDataType = arrDataTypeDetails[0].toLowerCase();
const increaseOriginalSize = arrDataTypeDetails.indexOf('unsigned') !== -1 || arrDataTypeDetails.indexOf('zerofill') !== -1;
arrDataTypeDetails = null;
if (mySqlDataType.indexOf('(') === -1) {
// No parentheses detected.
retVal = increaseOriginalSize ? objDataTypesMap[mySqlDataType].increased_size : objDataTypesMap[mySqlDataType].type;
} else {
// Parentheses detected.
let arrDataType = mySqlDataType.split('(');
const strDataType = arrDataType[0].toLowerCase();
const strDataTypeDisplayWidth = arrDataType[1];
arrDataType = null;
if ('enum' === strDataType || 'set' === strDataType) {
retVal = 'character varying(255)';
} else if ('decimal' === strDataType || 'numeric' === strDataType) {
retVal = objDataTypesMap[strDataType].type + '(' + strDataTypeDisplayWidth;
} else if ('decimal(19,2)' === mySqlDataType || objDataTypesMap[strDataType].mySqlVarLenPgSqlFixedLen) {
// Should be converted without a length definition.
retVal = increaseOriginalSize
? objDataTypesMap[strDataType].increased_size
: objDataTypesMap[strDataType].type;
} else {
// Should be converted with a length definition.
retVal = increaseOriginalSize
? objDataTypesMap[strDataType].increased_size + '(' + strDataTypeDisplayWidth
: objDataTypesMap[strDataType].type + '(' + strDataTypeDisplayWidth;
}
}
// Prevent incompatible length (CHARACTER(0) or CHARACTER VARYING(0)).
if (retVal === 'character(0)') {
retVal = 'character(1)';
} else if (retVal === 'character varying(0)') {
retVal = 'character varying(1)';
}
return retVal;
}
module.exports.mapDataTypes = mapDataTypes;
/**
* Migrates structure of a single table to PostgreSql server.
*
* @param {Conversion} self
* @param {String} tableName
*
* @returns {Promise}
*/
module.exports.createTable = (self, tableName) => {
return connect(self).then(() => {
return new Promise((resolveCreateTable, rejectCreateTable) => {
log(self, '\t--[createTable] Currently creating table: `' + tableName + '`', self._dicTables[tableName].tableLogPath);
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[createTable] Cannot connect to MySQL server...\n' + error);
rejectCreateTable();
} else {
const originalTableName = extraConfigProcessor.getTableName(self, tableName, true);
let sql = 'SHOW FULL COLUMNS FROM `' + originalTableName + '`;';
connection.query(sql, (err, rows) => {
connection.release();
if (err) {
generateError(self, '\t--[createTable] ' + err, sql);
rejectCreateTable();
} else {
self._dicTables[tableName].arrTableColumns = rows;
if (self._migrateOnlyData) {
return resolveCreateTable();
}
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[createTable] Cannot connect to PostgreSQL server...\n' + error, sql);
rejectCreateTable();
} else {
sql = 'CREATE TABLE IF NOT EXISTS "' + self._schema + '"."' + tableName + '"(';
for (let i = 0; i < rows.length; ++i) {
sql += '"' + extraConfigProcessor.getColumnName(self, originalTableName, rows[i].Field, false)
+ '" ' + mapDataTypes(self._dataTypesMap, rows[i].Type) + ',';
}
sql += '"' + self._schema + '_' + originalTableName + '_data_chunk_id_temp" BIGINT);';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[createTable] ' + err, sql);
rejectCreateTable();
} else {
log(
self,
'\t--[createTable] Table "' + self._schema + '"."' + tableName + '" is created...',
self._dicTables[tableName].tableLogPath
);
resolveCreateTable();
}
});
}
});
}
});
}
});
});
});
};

120
src/TableProcessor.ts Normal file

@ -0,0 +1,120 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log, generateError } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBAccessQueryResult from './DBAccessQueryResult';
import DBVendors from './DBVendors';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Converts MySQL data types to corresponding PostgreSQL data types.
* The conversion is performed in accordance with the mapping rules in './config/data_types_map.json'.
* './config/data_types_map.json' can be customized.
*/
export function mapDataTypes(objDataTypesMap: any, mySqlDataType: string): string {
let retVal: string = '';
const arrDataTypeDetails: string[] = mySqlDataType.split(' ');
mySqlDataType = arrDataTypeDetails[0].toLowerCase();
const increaseOriginalSize: boolean = arrDataTypeDetails.indexOf('unsigned') !== -1 || arrDataTypeDetails.indexOf('zerofill') !== -1;
if (mySqlDataType.indexOf('(') === -1) {
// No parentheses detected.
retVal = increaseOriginalSize ? objDataTypesMap[mySqlDataType].increased_size : objDataTypesMap[mySqlDataType].type;
} else {
// Parentheses detected.
const arrDataType: string[] = mySqlDataType.split('(');
const strDataType: string = arrDataType[0].toLowerCase();
const strDataTypeDisplayWidth: string = arrDataType[1];
if ('enum' === strDataType || 'set' === strDataType) {
retVal = 'character varying(255)';
} else if ('decimal' === strDataType || 'numeric' === strDataType) {
retVal = `${ objDataTypesMap[strDataType].type }(${ strDataTypeDisplayWidth }`;
} else if ('decimal(19,2)' === mySqlDataType || objDataTypesMap[strDataType].mySqlVarLenPgSqlFixedLen) {
// Should be converted without a length definition.
retVal = increaseOriginalSize ? objDataTypesMap[strDataType].increased_size : objDataTypesMap[strDataType].type;
} else {
// Should be converted with a length definition.
retVal = increaseOriginalSize
? `${ objDataTypesMap[strDataType].increased_size }(${ strDataTypeDisplayWidth }`
: `${ objDataTypesMap[strDataType].type }(${ strDataTypeDisplayWidth }`;
}
}
// Prevent incompatible length (CHARACTER(0) or CHARACTER VARYING(0)).
if (retVal === 'character(0)') {
retVal = 'character(1)';
} else if (retVal === 'character varying(0)') {
retVal = 'character varying(1)';
}
return retVal;
}
/**
* Migrates the structure of a single table to the PostgreSQL server.
*/
export async function createTable(conversion: Conversion, tableName: string): Promise<void> {
const logTitle: string = 'TableProcessor::createTable';
log(conversion, `\t--[${ logTitle }] Currently creating table: \`${ tableName }\``, conversion._dicTables[tableName].tableLogPath);
const dbAccess: DBAccess = new DBAccess(conversion);
const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true);
const sqlShowColumns: string = `SHOW FULL COLUMNS FROM \`${ originalTableName }\`;`;
const columns: DBAccessQueryResult = await dbAccess.query(logTitle, sqlShowColumns, DBVendors.MYSQL, false, false);
if (columns.error) {
return;
}
conversion._dicTables[tableName].arrTableColumns = columns.data;
if (conversion._migrateOnlyData) {
// Although the schema is pre-created, the data chunk id column must still be added
// in order to enforce data consistency in case of failures.
const sqlAddDataChunkIdColumn: string = `ALTER TABLE "${ conversion._schema }"."${ tableName }"
ADD COLUMN "${ conversion._schema }_${ originalTableName }_data_chunk_id_temp" BIGINT;`;
const result: DBAccessQueryResult = await dbAccess.query(logTitle, sqlAddDataChunkIdColumn, DBVendors.PG, false, false);
if (result.error) {
await generateError(conversion, `\t--[${ logTitle }] ${ result.error }`, sqlAddDataChunkIdColumn);
}
return;
}
let sqlCreateTable: string = `CREATE TABLE IF NOT EXISTS "${ conversion._schema }"."${ tableName }"(`;
columns.data.forEach((column: any) => {
const colName: string = extraConfigProcessor.getColumnName(conversion, originalTableName, column.Field, false);
const colType: string = mapDataTypes(conversion._dataTypesMap, column.Type);
sqlCreateTable += `"${ colName }" ${ colType },`;
});
sqlCreateTable += `"${ conversion._schema }_${ originalTableName }_data_chunk_id_temp" BIGINT);`;
const createTableResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlCreateTable, DBVendors.PG, false, false);
if (!createTableResult.error) {
log(conversion, `\t--[${ logTitle }] Table "${ conversion._schema }"."${ tableName }" is created...`, conversion._dicTables[tableName].tableLogPath);
}
}
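A usage sketch of mapDataTypes above; the map entries here are a hypothetical excerpt, since the actual mapping lives in './config/data_types_map.json':

// Hypothetical excerpt of the data types map ('./config/data_types_map.json' holds the real entries).
const sampleDataTypesMap: any = {
    int: { type: 'integer', increased_size: 'bigint', mySqlVarLenPgSqlFixedLen: true },
    varchar: { type: 'character varying', increased_size: 'character varying' }
};

mapDataTypes(sampleDataTypesMap, 'int(11) unsigned'); // 'bigint' - "unsigned" widens the type, the display width is dropped
mapDataTypes(sampleDataTypesMap, 'varchar(150)');     // 'character varying(150)'
mapDataTypes(sampleDataTypesMap, "enum('a','b')");    // 'character varying(255)'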


@ -1,78 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const connect = require('./Connector');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const extraConfigProcessor = require('./ExtraConfigProcessor');
/**
* Runs "vacuum full" and "analyze".
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports = self => {
return connect(self).then(() => {
return new Promise(resolve => {
const vacuumPromises = [];
for (let i = 0; i < self._tablesToMigrate.length; ++i) {
if (self._noVacuum.indexOf(extraConfigProcessor.getTableName(self, self._tablesToMigrate[i], true)) === -1) {
const msg = '\t--[runVacuumFullAndAnalyze] Running "VACUUM FULL and ANALYZE" query for table "'
+ self._schema + '"."' + self._tablesToMigrate[i] + '"...';
log(self, msg);
vacuumPromises.push(
new Promise(resolveVacuum => {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[runVacuumFullAndAnalyze] Cannot connect to PostgreSQL server...');
resolveVacuum();
} else {
const sql = 'VACUUM (FULL, ANALYZE) "' + self._schema + '"."' + self._tablesToMigrate[i] + '";';
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[runVacuumFullAndAnalyze] ' + err, sql);
resolveVacuum();
} else {
const msg2 = '\t--[runVacuumFullAndAnalyze] Table "' + self._schema
+ '"."' + self._tablesToMigrate[i] + '" is VACUUMed...';
log(self, msg2);
resolveVacuum();
}
});
}
});
})
);
}
}
Promise.all(vacuumPromises).then(() => resolve());
});
});
};

51
src/VacuumProcessor.ts Normal file

@ -0,0 +1,51 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import { log } from './FsOps';
import Conversion from './Conversion';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
import * as extraConfigProcessor from './ExtraConfigProcessor';
/**
* Runs "vacuum full" and "analyze".
*/
export default async function(conversion: Conversion): Promise<void> {
const dbAccess: DBAccess = new DBAccess(conversion);
const vacuumPromises: Promise<void>[] = conversion._tablesToMigrate.map(async (table: string) => {
if (conversion._noVacuum.indexOf(extraConfigProcessor.getTableName(conversion, table, true)) === -1) {
const msg: string = `\t--[runVacuumFullAndAnalyze] Running "VACUUM FULL and ANALYZE" query for table
"${ conversion._schema }"."${ table }"...`;
log(conversion, msg);
const sql: string = `VACUUM (FULL, ANALYZE) "${ conversion._schema }"."${ table }";`;
const result: DBAccessQueryResult = await dbAccess.query('runVacuumFullAndAnalyze', sql, DBVendors.PG, false, false);
if (!result.error) {
const msgSuccess: string = `\t--[runVacuumFullAndAnalyze] Table "${ conversion._schema }"."${ table }" is VACUUMed...`;
log(conversion, msgSuccess);
}
}
});
await Promise.all(vacuumPromises);
}
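The query issued per table above is a plain VACUUM (FULL, ANALYZE); any table whose original name appears in conversion._noVacuum (populated from the configuration) is skipped. A small sketch of that filter, with made-up table names:

// Made-up inputs illustrating which tables get vacuumed.
const tablesToMigrate: string[] = ['orders', 'logs', 'users'];
const noVacuum: string[] = ['logs']; // taken from the configuration

const vacuumed: string[] = tablesToMigrate.filter((table: string) => noVacuum.indexOf(table) === -1);
// vacuumed -> ['orders', 'users']; each of them produces:
// VACUUM (FULL, ANALYZE) "<schema>"."<table>";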


@ -1,172 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const fs = require('fs');
const path = require('path');
const log = require('./Logger');
const generateError = require('./ErrorGenerator');
const migrationStateManager = require('./MigrationStateManager');
/**
* Attempts to convert MySQL view to PostgreSQL view.
*
* @param {String} schema
* @param {String} viewName
* @param {String} mysqlViewCode
*
* @returns {String}
*/
const generateView = (schema, viewName, mysqlViewCode) => {
mysqlViewCode = mysqlViewCode.split('`').join('"');
const queryStart = mysqlViewCode.indexOf('AS');
mysqlViewCode = mysqlViewCode.slice(queryStart);
const arrMysqlViewCode = mysqlViewCode.split(' ');
for (let i = 0; i < arrMysqlViewCode.length; ++i) {
if (
arrMysqlViewCode[i].toLowerCase() === 'from'
|| arrMysqlViewCode[i].toLowerCase() === 'join'
&& i + 1 < arrMysqlViewCode.length
) {
arrMysqlViewCode[i + 1] = '"' + schema + '".' + arrMysqlViewCode[i + 1];
}
}
return 'CREATE OR REPLACE VIEW "' + schema + '"."' + viewName + '" ' + arrMysqlViewCode.join(' ') + ';';
}
/**
* Writes a log, containing a view code.
*
* @param {Conversion} self
* @param {String} viewName
* @param {String} sql
*
* @returns {undefined}
*/
const logNotCreatedView = (self, viewName, sql) => {
fs.stat(self._notCreatedViewsPath, (directoryDoesNotExist, stat) => {
if (directoryDoesNotExist) {
fs.mkdir(self._notCreatedViewsPath, self._0777, e => {
if (e) {
log(self, '\t--[logNotCreatedView] ' + e);
} else {
log(self, '\t--[logNotCreatedView] "not_created_views" directory is created...');
// "not_created_views" directory is created. Can write the log...
fs.open(path.join(self._notCreatedViewsPath, viewName + '.sql'), 'w', self._0777, (error, fd) => {
if (error) {
log(self, error);
} else {
const buffer = Buffer.from(sql, self._encoding);
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
// Each async function MUST have a callback (according to Node.js >= 7).
});
});
}
});
}
});
} else if (!stat.isDirectory()) {
log(self, '\t--[logNotCreatedView] Cannot write the log due to unexpected error');
} else {
// "not_created_views" directory already exists. Can write the log...
fs.open(path.join(self._notCreatedViewsPath, viewName + '.sql'), 'w', self._0777, (error, fd) => {
if (error) {
log(self, error);
} else {
const buffer = Buffer.from(sql, self._encoding);
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
// Each async function MUST have a callback (according to Node.js >= 7).
});
});
}
});
}
});
}
/**
* Attempts to convert MySQL view to PostgreSQL view.
*
* @param {Conversion} self
*
* @returns {Promise}
*/
module.exports = self => {
return migrationStateManager.get(self, 'views_loaded').then(hasViewsLoaded => {
return new Promise(resolve => {
const createViewPromises = [];
if (!hasViewsLoaded) {
for (let i = 0; i < self._viewsToMigrate.length; ++i) {
createViewPromises.push(
new Promise(resolveProcessView2 => {
self._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
generateError(self, '\t--[processView] Cannot connect to MySQL server...\n' + error);
resolveProcessView2();
} else {
let sql = 'SHOW CREATE VIEW `' + self._viewsToMigrate[i] + '`;';
connection.query(sql, (strErr, rows) => {
connection.release();
if (strErr) {
generateError(self, '\t--[processView] ' + strErr, sql);
resolveProcessView2();
} else {
self._pg.connect((error, client, done) => {
if (error) {
generateError(self, '\t--[processView] Cannot connect to PostgreSQL server...');
resolveProcessView2();
} else {
sql = generateView(self._schema, self._viewsToMigrate[i], rows[0]['Create View']);
rows = null;
client.query(sql, err => {
done();
if (err) {
generateError(self, '\t--[processView] ' + err, sql);
logNotCreatedView(self, self._viewsToMigrate[i], sql);
resolveProcessView2();
} else {
log(self, '\t--[processView] View "' + self._schema + '"."' + self._viewsToMigrate[i] + '" is created...');
resolveProcessView2();
}
});
}
});
}
});
}
});
})
);
}
}
Promise.all(createViewPromises).then(() => resolve());
});
});
};

138
src/ViewGenerator.ts Normal file

@ -0,0 +1,138 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as fs from 'fs';
import { Stats } from 'fs';
import * as path from 'path';
import { log } from './FsOps';
import Conversion from './Conversion';
import * as migrationStateManager from './MigrationStateManager';
import DBAccess from './DBAccess';
import DBVendors from './DBVendors';
import DBAccessQueryResult from './DBAccessQueryResult';
/**
* Attempts to convert a MySQL view definition to its PostgreSQL equivalent.
*/
function generateView(schema: string, viewName: string, mysqlViewCode: string): string {
mysqlViewCode = mysqlViewCode.split('`').join('"');
const queryStart: number = mysqlViewCode.indexOf('AS');
mysqlViewCode = mysqlViewCode.slice(queryStart);
const arrMysqlViewCode: string[] = mysqlViewCode.split(' ');
arrMysqlViewCode.forEach((str: string, index: number) => {
if ((str.toLowerCase() === 'from' || str.toLowerCase() === 'join') && index + 1 < arrMysqlViewCode.length) {
arrMysqlViewCode[index + 1] = `"${ schema }".${ arrMysqlViewCode[index + 1] }`;
}
});
return `CREATE OR REPLACE VIEW "${ schema }"."${ viewName }" ${ arrMysqlViewCode.join(' ') };`;
}
/**
* Writes a log file containing the code of a view that could not be created.
*/
function logNotCreatedView(conversion: Conversion, viewName: string, sql: string): Promise<void> {
return new Promise<void>((resolve) => {
fs.stat(conversion._notCreatedViewsPath, (directoryDoesNotExist: NodeJS.ErrnoException, stat: Stats) => {
if (directoryDoesNotExist) {
fs.mkdir(conversion._notCreatedViewsPath, conversion._0777, (e: NodeJS.ErrnoException) => {
if (e) {
log(conversion, `\t--[logNotCreatedView] ${ e }`);
return resolve();
}
log(conversion, '\t--[logNotCreatedView] "not_created_views" directory is created...');
// "not_created_views" directory is created. Can write the log...
fs.open(
path.join(conversion._notCreatedViewsPath, `${ viewName }.sql`),
'w',
conversion._0777,
(error: NodeJS.ErrnoException, fd: number) => {
if (error) {
log(conversion, error);
return resolve();
}
const buffer = Buffer.from(sql, conversion._encoding);
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
return resolve();
});
});
});
});
} else if (!stat.isDirectory()) {
log(conversion, '\t--[logNotCreatedView] Cannot write the log due to unexpected error');
return resolve();
} else {
// "not_created_views" directory already exists. Can write the log...
const viewFilePath: string = path.join(conversion._notCreatedViewsPath, `${ viewName }.sql`);
fs.open(viewFilePath, 'w', conversion._0777, (error: NodeJS.ErrnoException, fd: number) => {
if (error) {
log(conversion, error);
return resolve();
}
const buffer = Buffer.from(sql, conversion._encoding);
fs.write(fd, buffer, 0, buffer.length, null, () => {
fs.close(fd, () => {
return resolve();
});
});
});
}
});
});
}
/**
* Attempts to convert all MySQL views to PostgreSQL views.
*/
export default async function(conversion: Conversion): Promise<void> {
const hasViewsLoaded: boolean = await migrationStateManager.get(conversion, 'views_loaded');
if (hasViewsLoaded) {
return;
}
const createViewPromises: Promise<void>[] = conversion._viewsToMigrate.map(async (view: string) => {
const sqlShowCreateView: string = `SHOW CREATE VIEW \`${ view }\`;`;
const logTitle: string = 'ViewGenerator';
const dbAccess: DBAccess = new DBAccess(conversion);
const showCreateViewResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlShowCreateView, DBVendors.MYSQL, false, false);
if (showCreateViewResult.error) {
return;
}
const sqlCreatePgView: string = generateView(conversion._schema, view, showCreateViewResult.data[0]['Create View']);
const createPgViewResult: DBAccessQueryResult = await dbAccess.query(logTitle, sqlCreatePgView, DBVendors.PG, false, false);
if (createPgViewResult.error) {
return logNotCreatedView(conversion, view, sqlCreatePgView);
}
log(conversion, `\t--[${ logTitle }] View "${ conversion._schema }"."${ view }" is created...`);
});
await Promise.all(createViewPromises);
}
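A worked sketch of generateView above, fed a made-up definition in the form MySQL returns it from SHOW CREATE VIEW; backticks are swapped for double quotes, everything before AS is dropped, and the relation names following FROM/JOIN get schema-qualified:

const mySqlViewCode: string =
    'CREATE ALGORITHM=UNDEFINED DEFINER=`root`@`localhost` VIEW `v_users` AS select `users`.`name` from `users`';

generateView('public', 'v_users', mySqlViewCode);
// -> 'CREATE OR REPLACE VIEW "public"."v_users" AS select "users"."name" from "public"."users";'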


@ -18,49 +18,45 @@
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const test = require('tape');
const TestSchemaProcessor = require('./TestModules/TestSchemaProcessor');
const testSchema = require('./TestModules/SchemaProcessorTest');
const testDataContent = require('./TestModules/DataContentTest');
const testColumnTypes = require('./TestModules/ColumnTypesTest');
import * as test from 'tape';
import { EventEmitter } from 'events';
import Conversion from '../src/Conversion';
import TestSchemaProcessor from './TestModules/TestSchemaProcessor';
import testSchema from './TestModules/SchemaProcessorTest';
import testDataContent from './TestModules/DataContentTest';
import testColumnTypes from './TestModules/ColumnTypesTest';
/**
* Runs test suites.
*
* @param {TestSchemaProcessor} testSchemaProcessor
*
* @returns {Function}
*/
const runTestSuites = testSchemaProcessor => {
function runTestSuites(testSchemaProcessor: TestSchemaProcessor): () => void {
return () => {
test.onFinish(() => {
testSchemaProcessor.removeTestResources()
.then(() => process.exit());
test.onFinish(async () => {
await testSchemaProcessor.removeTestResources();
process.exit();
});
test('Test schema should be created', tapeTestSchema => {
test('Test schema should be created', (tapeTestSchema: test.Test) => {
testSchema(testSchemaProcessor, tapeTestSchema);
});
test('Test the data content', tapeTestDataContent => {
test('Test the data content', (tapeTestDataContent: test.Test) => {
testDataContent(testSchemaProcessor, tapeTestDataContent);
});
test('Test column types', tapeTestColumnTypes => {
test('Test column types', (tapeTestColumnTypes: test.Test) => {
testColumnTypes(testSchemaProcessor, tapeTestColumnTypes);
});
};
};
}
const testSchemaProcessor = new TestSchemaProcessor();
const testSchemaProcessor: TestSchemaProcessor = new TestSchemaProcessor();
testSchemaProcessor
.initializeConversion()
.then(conversion => {
.then((conversion: Conversion) => {
// Registers a callback, which is invoked once the test database arrangement is completed.
conversion._eventEmitter.on(conversion._migrationCompletedEvent, runTestSuites(testSchemaProcessor));
(<EventEmitter>conversion._eventEmitter).on(conversion._migrationCompletedEvent, runTestSuites(testSchemaProcessor));
// Continues the test database arrangement.
return Promise.resolve(conversion);


@ -1,117 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
/**
* Returns `table_a` column types.
*
* @param {TestSchemaProcessor} testSchemaProcessor
*
* @returns {Promise<Array>}
*/
const getColumnTypes = testSchemaProcessor => {
const sql = `SELECT column_name, data_type
FROM information_schema.columns
WHERE table_catalog = '${ testSchemaProcessor._conversion._targetConString.database }'
AND table_schema = '${ testSchemaProcessor._conversion._schema }'
AND table_name = 'table_a';`;
return testSchemaProcessor
.queryPg(sql)
.then(data => data.rows);
};
/**
* Returns expected column types.
*
* @returns {Object}
*/
const getExpectedColumnTypes = () => {
return {
id_test_sequence : 'bigint',
id_test_unique_index : 'integer',
id_test_composite_unique_index_1 : 'integer',
id_test_composite_unique_index_2 : 'integer',
id_test_index : 'integer',
int_test_not_null : 'integer',
id_test_composite_index_1 : 'integer',
id_test_composite_index_2 : 'integer',
json_test_comment : 'json',
bit : 'bit varying',
year : 'smallint',
tinyint_test_default : 'smallint',
smallint : 'smallint',
mediumint : 'integer',
bigint : 'bigint',
float : 'real',
double : 'double precision',
double_precision : 'double precision',
numeric : 'numeric',
decimal : 'numeric',
decimal_19_2 : 'numeric',
char_5 : 'character',
varchar_5 : 'character varying',
date : 'date',
time : 'time without time zone',
datetime : 'timestamp without time zone',
timestamp : 'timestamp without time zone',
enum : 'character varying',
set : 'character varying',
tinytext : 'text',
mediumtext : 'text',
longtext : 'text',
text : 'text',
blob : 'bytea',
longblob : 'bytea',
mediumblob : 'bytea',
tinyblob : 'bytea',
varbinary : 'bytea',
binary : 'bytea',
};
};
/**
* The data content testing.
*
* @param {TestSchemaProcessor} testSchemaProcessor
* @param {Tape} tape
*
* @returns {undefined}
*/
module.exports = (testSchemaProcessor, tape) => {
getColumnTypes(testSchemaProcessor).then(data => {
const expectedColumnTypes = getExpectedColumnTypes();
const autoTimeoutMs = 3 * 1000; // 3 seconds.
const numberOfPlannedAssertions = data.length;
tape.plan(numberOfPlannedAssertions);
tape.timeoutAfter(autoTimeoutMs);
for (let i = 0; i < numberOfPlannedAssertions; ++i) {
const columnName = data[i].column_name;
const actualColumnType = data[i].data_type;
const expectedColumnType = expectedColumnTypes[columnName];
tape.comment(`Test ${ columnName } column type`);
tape.equal(actualColumnType, expectedColumnType);
}
});
};


@ -0,0 +1,121 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import TestSchemaProcessor from './TestSchemaProcessor';
import Conversion from '../../src/Conversion';
import DBAccess from '../../src/DBAccess';
import DBVendors from '../../src/DBVendors';
import DBAccessQueryResult from '../../src/DBAccessQueryResult';
import { Test } from 'tape';
/**
* Returns `table_a` column types.
*/
async function getColumnTypes(testSchemaProcessor: TestSchemaProcessor): Promise<any[]> {
const logTitle: string = 'ColumnTypesTest::getColumnTypes';
const sql: string = `SELECT column_name, data_type
FROM information_schema.columns
WHERE table_catalog = '${ (<Conversion>testSchemaProcessor.conversion)._targetConString.database }'
AND table_schema = '${ (<Conversion>testSchemaProcessor.conversion)._schema }'
AND table_name = 'table_a';`;
const result: DBAccessQueryResult = await (<DBAccess>testSchemaProcessor.dbAccess).query(
logTitle,
sql,
DBVendors.PG,
false,
false
);
if (result.error) {
await testSchemaProcessor.processFatalError(result.error);
}
return result.data.rows;
}
/**
* Returns expected column types map.
*/
function getExpectedColumnTypes(): Map<string, string> {
return new Map<string, string>([
['id_test_sequence', 'bigint'],
['id_test_unique_index', 'integer'],
['id_test_composite_unique_index_1', 'integer'],
['id_test_composite_unique_index_2', 'integer'],
['id_test_index', 'integer'],
['int_test_not_null', 'integer'],
['id_test_composite_index_1', 'integer'],
['id_test_composite_index_2', 'integer'],
['json_test_comment', 'json'],
['bit', 'bit varying'],
['year', 'smallint'],
['tinyint_test_default', 'smallint'],
['smallint', 'smallint'],
['mediumint', 'integer'],
['bigint', 'bigint'],
['float', 'real'],
['double', 'double precision'],
['double_precision', 'double precision'],
['numeric', 'numeric'],
['decimal', 'numeric'],
['decimal_19_2', 'numeric'],
['char_5', 'character'],
['varchar_5', 'character varying'],
['date', 'date'],
['time', 'time without time zone'],
['datetime', 'timestamp without time zone'],
['timestamp', 'timestamp without time zone'],
['enum', 'character varying'],
['set', 'character varying'],
['tinytext', 'text'],
['mediumtext', 'text'],
['longtext', 'text'],
['text', 'text'],
['blob', 'bytea'],
['longblob', 'bytea'],
['mediumblob', 'bytea'],
['tinyblob', 'bytea'],
['varbinary', 'bytea'],
['binary', 'bytea']
]);
}
/**
* Tests the column types.
*/
export default async function(testSchemaProcessor: TestSchemaProcessor, tape: Test): Promise<void> {
const data: any[] = await getColumnTypes(testSchemaProcessor);
const expectedColumnTypesMap: Map<string, string> = getExpectedColumnTypes();
const autoTimeoutMs: number = 3 * 1000; // 3 seconds.
const numberOfPlannedAssertions: number = data.length;
tape.plan(numberOfPlannedAssertions);
tape.timeoutAfter(autoTimeoutMs);
for (let i: number = 0; i < numberOfPlannedAssertions; ++i) {
const columnName: string = data[i].column_name;
const actualColumnType: string = data[i].data_type;
const expectedColumnType: string = <string>expectedColumnTypesMap.get(columnName);
tape.comment(`Test ${ columnName } column type`);
tape.equal(actualColumnType, expectedColumnType);
}
}


@ -1,130 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
/**
* Retrieve a data from `table_a`.
*
* @param {TestSchemaProcessor} testSchemaProcessor
*
* @returns {Promise<Object>}
*/
const retrieveData = testSchemaProcessor => {
const sql = `SELECT ENCODE(table_a.blob, 'escape') AS blob_text, table_a.*
FROM ${ testSchemaProcessor._conversion._schema }.table_a AS table_a;`;
return testSchemaProcessor
.queryPg(sql)
.then(data => data.rows[0]);
};
/**
* The data content testing.
*
* @param {TestSchemaProcessor} testSchemaProcessor
* @param {Tape} tape
*
* @returns {undefined}
*/
module.exports = (testSchemaProcessor, tape) => {
retrieveData(testSchemaProcessor).then(data => {
const autoTimeoutMs = 3 * 1000; // 3 seconds.
const numberOfPlannedAssertions = 24;
const originalTestBlobText = testSchemaProcessor.getTestBlob(testSchemaProcessor._conversion).toString();
tape.plan(numberOfPlannedAssertions);
tape.timeoutAfter(autoTimeoutMs);
tape.comment('Test blob_text column value');
tape.equal(data.blob_text, originalTestBlobText);
tape.comment('Test bit column value');
tape.equal(data.bit, '1'); // BIT is actually a "bit string", for example: '1110' -> 14
tape.comment('Test id_test_unique_index column value');
tape.equal(data.id_test_unique_index, 7384);
tape.comment('Test id_test_composite_unique_index_1 column value');
tape.equal(data.id_test_composite_unique_index_1, 125);
tape.comment('Test id_test_composite_unique_index_2 column value');
tape.equal(data.id_test_composite_unique_index_2, 234);
tape.comment('Test id_test_index column value');
tape.equal(data.id_test_index, 123);
tape.comment('Test int_test_not_null column value');
tape.equal(data.int_test_not_null, 123);
tape.comment('Test id_test_composite_index_1 column value');
tape.equal(data.id_test_composite_index_1, 11);
tape.comment('Test id_test_composite_index_2 column value');
tape.equal(data.id_test_composite_index_2, 22);
tape.comment('Test json_test_comment column value');
tape.equal(JSON.stringify(data.json_test_comment), '{"prop1":"First","prop2":2}');
tape.comment('Test year column value');
tape.equal(data.year, 1984);
tape.comment('Test bigint column value');
tape.equal(data.bigint, '1234567890123456800');
tape.comment('Test float column value');
tape.equal(data.float, 12345.5);
tape.comment('Test double column value');
tape.equal(data.double, 123456789.23);
tape.comment('Test numeric column value');
tape.equal(data.numeric, '1234567890');
tape.comment('Test decimal column value');
tape.equal(data.decimal, '1234567890');
tape.comment('Test char_5 column value');
tape.equal(data.char_5, 'fghij');
tape.comment('Test varchar_5 column value');
tape.equal(data.varchar_5, 'abcde');
tape.comment('Test date column value');
tape.equal(`${ data.date.getFullYear() }-${ data.date.getMonth() + 1 }-${ data.date.getDate() }`, '1984-11-30');
tape.comment('Test time column value');
tape.equal(data.time, '21:12:33');
tape.comment('Test text column value');
tape.equal(data.text, 'Test text');
tape.comment('Test enum column value');
tape.equal(data.enum, 'e1');
tape.comment('Test set column value');
tape.equal(data.set, 's2');
const date = `${ data.timestamp.getFullYear() }-${ data.timestamp.getMonth() + 1 }-${ data.timestamp.getDate() }`;
const time = `${ data.timestamp.getHours() }:${ data.timestamp.getMinutes() }:${ data.timestamp.getSeconds() }`;
tape.comment('Test timestamp column value');
tape.equal(`${ date } ${ time }`, '2018-11-11 22:21:20');
});
};


@ -0,0 +1,136 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import Conversion from '../../src/Conversion';
import TestSchemaProcessor from './TestSchemaProcessor';
import DBAccess from '../../src/DBAccess';
import DBVendors from '../../src/DBVendors';
import DBAccessQueryResult from '../../src/DBAccessQueryResult';
import { Test } from 'tape';
/**
* Retrieves data from `table_a`.
*/
async function retrieveData(testSchemaProcessor: TestSchemaProcessor): Promise<any> {
const logTitle: string = 'DataContentTest::retrieveData';
const sql: string = `SELECT ENCODE(table_a.blob, 'escape') AS blob_text, table_a.*
FROM ${ (<Conversion>testSchemaProcessor.conversion)._schema }.table_a AS table_a;`;
const result: DBAccessQueryResult = await (<DBAccess>testSchemaProcessor.dbAccess).query(
logTitle,
sql,
DBVendors.PG,
false,
false
);
if (result.error) {
await testSchemaProcessor.processFatalError(result.error);
}
return result.data.rows[0];
}
/**
* Tests the data content.
*/
export default async function(testSchemaProcessor: TestSchemaProcessor, tape: Test): Promise<void> {
const data: any = await retrieveData(testSchemaProcessor);
const autoTimeoutMs: number = 3 * 1000; // 3 seconds.
const numberOfPlannedAssertions: number = 24;
const originalTestBlobText: string = testSchemaProcessor.getTestBlob(<Conversion>testSchemaProcessor.conversion).toString();
tape.plan(numberOfPlannedAssertions);
tape.timeoutAfter(autoTimeoutMs);
tape.comment('Test blob_text column value');
tape.equal(data.blob_text, originalTestBlobText);
tape.comment('Test bit column value');
tape.equal(data.bit, '1'); // BIT is actually a "bit string", for example: '1110' -> 14
tape.comment('Test id_test_unique_index column value');
tape.equal(data.id_test_unique_index, 7384);
tape.comment('Test id_test_composite_unique_index_1 column value');
tape.equal(data.id_test_composite_unique_index_1, 125);
tape.comment('Test id_test_composite_unique_index_2 column value');
tape.equal(data.id_test_composite_unique_index_2, 234);
tape.comment('Test id_test_index column value');
tape.equal(data.id_test_index, 123);
tape.comment('Test int_test_not_null column value');
tape.equal(data.int_test_not_null, 123);
tape.comment('Test id_test_composite_index_1 column value');
tape.equal(data.id_test_composite_index_1, 11);
tape.comment('Test id_test_composite_index_2 column value');
tape.equal(data.id_test_composite_index_2, 22);
tape.comment('Test json_test_comment column value');
tape.equal(JSON.stringify(data.json_test_comment), '{"prop1":"First","prop2":2}');
tape.comment('Test year column value');
tape.equal(data.year, 1984);
tape.comment('Test bigint column value');
tape.equal(data.bigint, '1234567890123456800');
tape.comment('Test float column value');
tape.equal(data.float, 12345.5);
tape.comment('Test double column value');
tape.equal(data.double, 123456789.23);
tape.comment('Test numeric column value');
tape.equal(data.numeric, '1234567890');
tape.comment('Test decimal column value');
tape.equal(data.decimal, '1234567890');
tape.comment('Test char_5 column value');
tape.equal(data.char_5, 'fghij');
tape.comment('Test varchar_5 column value');
tape.equal(data.varchar_5, 'abcde');
tape.comment('Test date column value');
tape.equal(`${ data.date.getFullYear() }-${ data.date.getMonth() + 1 }-${ data.date.getDate() }`, '1984-11-30');
tape.comment('Test time column value');
tape.equal(data.time, '21:12:33');
tape.comment('Test text column value');
tape.equal(data.text, 'Test text');
tape.comment('Test enum column value');
tape.equal(data.enum, 'e1');
tape.comment('Test set column value');
tape.equal(data.set, 's2');
const date: string = `${ data.timestamp.getFullYear() }-${ data.timestamp.getMonth() + 1 }-${ data.timestamp.getDate() }`;
const time: string = `${ data.timestamp.getHours() }:${ data.timestamp.getMinutes() }:${ data.timestamp.getSeconds() }`;
tape.comment('Test timestamp column value');
tape.equal(`${ date } ${ time }`, '2018-11-11 22:21:20');
}


@ -1,56 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
/**
* Checks if the schema exists.
*
* @param {TestSchemaProcessor} testSchemaProcessor
*
* @returns {Promise<Boolean>}
*/
const hasSchemaCreated = testSchemaProcessor => {
const sql = `SELECT EXISTS(SELECT schema_name FROM information_schema.schemata
WHERE schema_name = '${ testSchemaProcessor._conversion._schema }');`;
return testSchemaProcessor
.queryPg(sql)
.then(data => !!data.rows[0].exists);
};
/**
* Schema creation testing.
*
* @param {TestSchemaProcessor} testSchemaProcessor
* @param {Tape} tape
*
* @returns {undefined}
*/
module.exports = (testSchemaProcessor, tape) => {
hasSchemaCreated(testSchemaProcessor).then(schemaExists => {
const numberOfPlannedAssertions = 1;
const autoTimeoutMs = 3 * 1000; // 3 seconds.
tape.plan(numberOfPlannedAssertions);
tape.timeoutAfter(autoTimeoutMs);
tape.equal(schemaExists, true);
});
};


@ -0,0 +1,62 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import TestSchemaProcessor from './TestSchemaProcessor';
import Conversion from '../../src/Conversion';
import DBAccess from '../../src/DBAccess';
import DBVendors from '../../src/DBVendors';
import DBAccessQueryResult from '../../src/DBAccessQueryResult';
import { Test } from 'tape';
/**
* Checks if the schema exists.
*/
async function hasSchemaCreated(testSchemaProcessor: TestSchemaProcessor): Promise<boolean> {
const logTitle: string = 'SchemaProcessorTest::hasSchemaCreated';
const sql: string = `SELECT EXISTS(SELECT schema_name FROM information_schema.schemata
WHERE schema_name = '${ (<Conversion>testSchemaProcessor.conversion)._schema }');`;
const result: DBAccessQueryResult = await (<DBAccess>testSchemaProcessor.dbAccess).query(
logTitle,
sql,
DBVendors.PG,
false,
false
);
if (result.error) {
await testSchemaProcessor.processFatalError(result.error);
}
return !!result.data.rows[0].exists;
}
/**
* Tests schema creation.
*/
export default async function(testSchemaProcessor: TestSchemaProcessor, tape: Test): Promise<void> {
const schemaExists: boolean = await hasSchemaCreated(testSchemaProcessor);
const numberOfPlannedAssertions: number = 1;
const autoTimeoutMs: number = 3 * 1000; // 3 seconds.
tape.plan(numberOfPlannedAssertions);
tape.timeoutAfter(autoTimeoutMs);
tape.equal(schemaExists, true);
}


@ -1,356 +0,0 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
'use strict';
const fs = require('fs');
const path = require('path');
const { EventEmitter } = require('events');
const connect = require('../../src/Connector');
const Main = require('../../src/Main');
const SchemaProcessor = require('../../src/SchemaProcessor');
const readDataTypesMap = require('../../src/DataTypesMapReader');
const loadStructureToMigrate = require('../../src/StructureLoader');
const pipeData = require('../../src/DataPipeManager');
const { createStateLogsTable } = require('../../src/MigrationStateManager');
const { createDataPoolTable, readDataPool } = require('../../src/DataPoolManager');
const generateError = require('../../src/ErrorGenerator');
module.exports = class TestSchemaProcessor {
/**
* TestSchemaProcessor constructor.
*/
constructor() {
this._app = new Main();
this._conversion = null;
}
/**
* Stops the process in case of fatal error.
*
* @param {Conversion} conversion
* @param {String} error
*
* @returns {undefined}
*/
processFatalError(conversion, error) {
console.log(error);
generateError(conversion, error);
process.exit();
}
/**
* Removes resources created by test scripts.
*
* @returns {Promise<any>}
*/
removeTestResources() {
return new Promise(resolve => {
if (!this._conversion._removeTestResources) {
return resolve();
}
return connect(this._conversion).then(() => {
this._conversion._mysql.getConnection((mysqlConErr, connection) => {
if (mysqlConErr) {
// The connection is undefined.
this.processFatalError(this._conversion, mysqlConErr);
}
connection.query(`DROP DATABASE ${ this._conversion._mySqlDbName };`, mysqlDropErr => {
connection.release();
if (mysqlDropErr) {
// Failed to drop test source database.
this.processFatalError(this._conversion, mysqlDropErr);
}
this._conversion._pg.connect((pgConErr, client, release) => {
if (pgConErr) {
//The connection is undefined.
this.processFatalError(this._conversion, pgConErr);
}
client.query(`DROP SCHEMA ${ this._conversion._schema } CASCADE;`, pgDropErr => {
release();
if (pgDropErr) {
// Failed to drop test target schema.
this.processFatalError(this._conversion, pgDropErr);
}
resolve();
});
});
});
});
});
});
}
/**
* Creates test source database.
*
* @param {Conversion} conversion
*
* @returns {Promise<Conversion>}
*/
createTestSourceDb(conversion) {
return connect(conversion).then(() => {
return new Promise(resolve => {
conversion._mysql.getConnection((error, connection) => {
if (error) {
// The connection is undefined.
this.processFatalError(conversion, error);
}
connection.query(`CREATE DATABASE IF NOT EXISTS ${ this._conversion._mySqlDbName };`, err => {
connection.release();
if (err) {
// Failed to create test source database.
this.processFatalError(conversion, err);
}
resolve(conversion);
});
});
});
});
}
/**
* Update the "database" part of MySQL connection.
*
* @param {Conversion} conversion
*
* @returns {Promise<Conversion>}
*/
updateMySqlConnection(conversion) {
return new Promise(resolve => {
conversion._mysql = null;
conversion._sourceConString.database = conversion._mySqlDbName;
resolve(conversion);
});
}
/**
* Reads contents from the specified resource.
*
* @param {String} filePath
*
* @returns {Promise<Buffer>}
*/
readFile(filePath) {
return new Promise(resolve => {
fs.readFile(filePath, (error, data) => {
if (error) {
console.log(`\t--[readFile] Cannot read file from ${ filePath }`);
process.exit();
}
resolve(data);
});
});
}
/**
* Reads test schema sql file.
*
* @returns {Promise<Buffer>}
*/
readTestSchema() {
const testSchemaFilePath = path.join(__dirname, '..', 'test_schema.sql');
return this.readFile(testSchemaFilePath);
}
/**
* Loads test schema into MySQL test database.
*
* @param {Conversion} conversion
*
* @returns {Promise<Conversion>}
*/
loadTestSchema(conversion) {
return connect(conversion)
.then(this.readTestSchema.bind(this))
.then(sqlBuffer => {
return new Promise(resolve => {
conversion._mysql.getConnection((error, connection) => {
if (error) {
this.processFatalError(conversion, error);
}
connection.query(sqlBuffer.toString(), err => {
connection.release();
if (err) {
this.processFatalError(conversion, err);
}
resolve(conversion);
});
});
});
});
}
/**
* Provides a blob for a sake of testing.
*
* @param {Conversion} conversion
*
* @returns {Buffer}
*/
getTestBlob(conversion) {
return Buffer.from('Automated tests development is in progress.', conversion._encoding);
}
/**
* Loads test data into MySQL test database.
*
* @param {Conversion} conversion
*
* @returns {Promise<Conversion>}
*/
loadTestData(conversion) {
return connect(conversion).then(() => {
return new Promise(resolve => {
conversion._mysql.getConnection((error, connection) => {
if (error) {
this.processFatalError(conversion, error);
}
const insertParams = {
id_test_unique_index : 7384,
id_test_composite_unique_index_1 : 125,
id_test_composite_unique_index_2 : 234,
id_test_index : 123,
int_test_not_null : 123,
id_test_composite_index_1 : 11,
id_test_composite_index_2 : 22,
json_test_comment : '{"prop1":"First","prop2":2}',
bit : 1,
year : 1984,
bigint : '1234567890123456800',
float : 12345.5,
double : 123456789.23,
numeric : '1234567890',
decimal : '1234567890',
char_5 : 'fghij',
varchar_5 : 'abcde',
date : '1984-11-30',
time : '21:12:33',
timestamp : '2018-11-11 22:21:20',
enum : 'e1',
set : 's2',
text : 'Test text',
blob : this.getTestBlob(conversion),
};
connection.query('INSERT INTO `table_a` SET ?;', insertParams, err => {
connection.release();
if (err) {
this.processFatalError(conversion, err);
}
resolve(conversion);
});
});
});
});
}
/**
* Initializes Conversion instance.
*
* @returns {Promise<Conversion>}
*/
initializeConversion() {
const baseDir = path.join(__dirname, '..', '..');
return this._app.readConfig(baseDir, 'test_config.json')
.then(config => this._app.readExtraConfig(config, baseDir))
.then(this._app.initializeConversion)
.then(conversion => {
this._conversion = conversion;
this._conversion._runsInTestMode = true;
this._conversion._eventEmitter = new EventEmitter();
delete this._conversion._sourceConString.database;
return Promise.resolve(this._conversion);
});
}
/**
* Arranges test migration.
* "migrationCompleted" event will fire on completion.
*
* @param {Conversion} conversion
*
* @returns {undefined}
*/
arrangeTestMigration(conversion) {
Promise.resolve(conversion)
.then(this.createTestSourceDb.bind(this))
.then(this.updateMySqlConnection.bind(this))
.then(this.loadTestSchema.bind(this))
.then(this.loadTestData.bind(this))
.then(readDataTypesMap)
.then(this._app.createLogsDirectory)
.then(conversion => (new SchemaProcessor(conversion)).createSchema())
.then(createStateLogsTable)
.then(createDataPoolTable)
.then(loadStructureToMigrate)
.then(readDataPool)
.then(pipeData)
.catch(error => console.log(error));
}
/**
* Query PostgreSQL server.
*
* @param {String} sql
*
* @returns {Promise<pg.Result>}
*/
queryPg(sql) {
return connect(this._conversion).then(() => {
return new Promise(resolve => {
this._conversion._pg.connect((error, client, release) => {
if (error) {
this.processFatalError(this._conversion, error);
}
client.query(sql, (err, data) => {
release();
if (err) {
this.processFatalError(this._conversion, err);
}
resolve(data);
});
});
});
});
}
};


@ -0,0 +1,307 @@
/*
* This file is a part of "NMIG" - the database migration tool.
*
* Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program (please see the "LICENSE.md" file).
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
*/
import * as path from 'path';
import * as fs from 'fs';
import { EventEmitter } from 'events';
import Conversion from '../../src/Conversion';
import DBAccess from '../../src/DBAccess';
import DBVendors from '../../src/DBVendors';
import DBAccessQueryResult from '../../src/DBAccessQueryResult';
import createSchema from '../../src/SchemaProcessor';
import loadStructureToMigrate from '../../src/StructureLoader';
import pipeData from '../../src/DataPipeManager';
import { createStateLogsTable } from '../../src/MigrationStateManager';
import { createDataPoolTable, readDataPool } from '../../src/DataPoolManager';
import { checkConnection, getLogo } from '../../src/BootProcessor';
import {
readConfig,
readExtraConfig,
createLogsDirectory,
readDataTypesMap,
log,
generateError
} from '../../src/FsOps';
export default class TestSchemaProcessor {
/**
* Instance of class Conversion.
*/
public conversion?: Conversion;
/**
* Instance of class DBAccess.
*/
public dbAccess?: DBAccess;
/**
* TestSchemaProcessor constructor.
*/
public constructor() {
this.conversion = undefined;
this.dbAccess = undefined;
}
/**
* Stops the process in case of fatal error.
*/
public async processFatalError(error: string): Promise<void> {
console.log(error);
await generateError(<Conversion>this.conversion, error);
process.exit();
}
/**
* Removes resources created by test scripts.
*/
public async removeTestResources(): Promise<void> {
if (!(<Conversion>this.conversion)._removeTestResources) {
return;
}
const sqlDropMySqlDatabase: string = `DROP DATABASE ${ (<Conversion>this.conversion)._mySqlDbName };`;
await (<DBAccess>this.dbAccess).query(
'removeTestResources',
sqlDropMySqlDatabase,
DBVendors.MYSQL,
true,
false
);
const sqlDropPgDatabase: string = `DROP SCHEMA ${ (<Conversion>this.conversion)._schema } CASCADE;`;
await (<DBAccess>this.dbAccess).query(
'removeTestResources',
sqlDropPgDatabase,
DBVendors.PG,
true,
false
);
}
/**
* Prevents tests from running if the test databases (MySQL and/or PostgreSQL) already exist.
*/
private async _checkResources(conversion: Conversion): Promise<Conversion> {
const logTitle: string = 'TestSchemaProcessor::_checkResources';
const sqlIsMySqlDbExist: string = `SELECT EXISTS (SELECT schema_name FROM information_schema.schemata
WHERE schema_name = '${ (<Conversion>this.conversion)._mySqlDbName }') AS \`exists\`;`;
const mySqlResult: DBAccessQueryResult = await (<DBAccess>this.dbAccess).query(
logTitle,
sqlIsMySqlDbExist,
DBVendors.MYSQL,
true,
false
);
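// Note: the mysql driver returns result rows directly in "data", while pg wraps them in "data.rows" (see below).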
const mySqlExists: boolean = !!mySqlResult.data[0].exists;
const sqlIsPgDbExist: string = `SELECT EXISTS(SELECT schema_name FROM information_schema.schemata
WHERE schema_name = '${ (<Conversion>this.conversion)._schema }');`;
const pgResult: DBAccessQueryResult = await (<DBAccess>this.dbAccess).query(
logTitle,
sqlIsPgDbExist,
DBVendors.PG,
true,
false
);
const pgExists: boolean = !!pgResult.data.rows[0].exists;
let msg: string = '';
if (mySqlExists) {
msg += `Please remove the '${ (<Conversion>this.conversion)._mySqlDbName }' database from your MySQL server prior to running tests.\n`;
}
if (pgExists) {
const schemaName: string = `'${ (<Conversion>this.conversion)._targetConString.database }.${ (<Conversion>this.conversion)._schema }'`;
msg += `Please remove the ${ schemaName } schema from your PostgreSQL server prior to running tests.`;
}
if (msg) {
log(<Conversion>this.conversion, msg);
process.exit();
}
return conversion;
}
/**
* Creates the test source database.
*/
private async _createTestSourceDb(conversion: Conversion): Promise<Conversion> {
const sql: string = `CREATE DATABASE IF NOT EXISTS ${ (<Conversion>this.conversion)._mySqlDbName };`;
await (<DBAccess>this.dbAccess).query('_createTestSourceDb', sql, DBVendors.MYSQL, true, false);
return conversion;
}
/**
* Updates the "database" part of MySQL connection.
*/
private _updateMySqlConnection(conversion: Conversion): Promise<Conversion> {
return new Promise<Conversion>(resolve => {
conversion._mysql = undefined;
conversion._sourceConString.database = conversion._mySqlDbName;
resolve(conversion);
});
}
/**
* Reads the contents of the specified file.
*/
private _readFile(filePath: string): Promise<Buffer> {
return new Promise<Buffer>(resolve => {
fs.readFile(filePath, (error: Error, data: Buffer) => {
if (error) {
console.log(`\t--[_readFile] Cannot read file from ${ filePath }`);
process.exit();
}
resolve(data);
});
});
}
/**
* Reads the test schema SQL file.
*/
private _readTestSchema(): Promise<Buffer> {
const testSchemaFilePath: string = path.join(__dirname, '..', '..', '..', 'test', 'test_schema.sql');
return this._readFile(testSchemaFilePath);
}
/**
* Loads the test schema into the MySQL test database.
*/
private async _loadTestSchema(conversion: Conversion): Promise<Conversion> {
const sqlBuffer: Buffer = await this._readTestSchema();
await (<DBAccess>this.dbAccess).query(
'_loadTestSchema',
sqlBuffer.toString(),
DBVendors.MYSQL,
true,
false
);
return conversion;
}
/**
* Provides a blob for the sake of testing.
*/
public getTestBlob(conversion: Conversion): Buffer {
return Buffer.from('Automated tests development is in progress.', conversion._encoding);
}
/**
* Loads the test data into the MySQL test database.
*/
private async _loadTestData(conversion: Conversion): Promise<Conversion> {
const insertParams: any = {
id_test_unique_index: 7384,
id_test_composite_unique_index_1: 125,
id_test_composite_unique_index_2: 234,
id_test_index: 123,
int_test_not_null: 123,
id_test_composite_index_1: 11,
id_test_composite_index_2: 22,
json_test_comment: '{"prop1":"First","prop2":2}',
bit: 1,
year: 1984,
bigint: '1234567890123456800',
float: 12345.5,
double: 123456789.23,
numeric: '1234567890',
decimal: '1234567890',
char_5: 'fghij',
varchar_5: 'abcde',
date: '1984-11-30',
time: '21:12:33',
timestamp: '2018-11-11 22:21:20',
enum: 'e1',
set: 's2',
text: 'Test text',
blob: this.getTestBlob(conversion)
};
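// Build a parameterized INSERT: back-tick-quote every column name and emit one "?" placeholder per column.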
const insertParamsKeys: string[] = Object.keys(insertParams).map((k: string) => `\`${ k }\``);
const sql: string = `INSERT INTO \`table_a\`(${ insertParamsKeys.join(',') }) VALUES(${ insertParamsKeys.map(() => '?').join(',') });`;
await (<DBAccess>this.dbAccess).query(
'TestSchemaProcessor::_loadTestData',
sql,
DBVendors.MYSQL,
true,
false,
undefined,
Object.values(insertParams)
);
return conversion;
}
/**
* Initializes the Conversion instance.
*/
public async initializeConversion(): Promise<Conversion> {
const baseDir: string = path.join(__dirname, '..', '..', '..');
const config: any = await readConfig(baseDir, 'test_config.json');
const fullConfig: any = await readExtraConfig(config, baseDir);
this.conversion = await Conversion.initializeConversion(fullConfig);
this.conversion._runsInTestMode = true;
this.conversion._eventEmitter = new EventEmitter();
this.dbAccess = new DBAccess(this.conversion);
const logo: string = getLogo();
console.log(logo);
delete this.conversion._sourceConString.database;
return this.conversion;
}
/**
* Arranges the test migration.
* The "migrationCompleted" event fires on completion.
*/
public async arrangeTestMigration(conversion: Conversion): Promise<void> {
const connectionErrorMessage = await checkConnection(conversion, <DBAccess>this.dbAccess);
if (connectionErrorMessage) {
console.log(connectionErrorMessage);
process.exit();
}
Promise.resolve(conversion)
.then(this._checkResources.bind(this))
.then(this._createTestSourceDb.bind(this))
.then(this._updateMySqlConnection.bind(this))
.then(this._loadTestSchema.bind(this))
.then(this._loadTestData.bind(this))
.then(readDataTypesMap)
.then(createLogsDirectory)
.then(createSchema)
.then(createStateLogsTable)
.then(createDataPoolTable)
.then(loadStructureToMigrate)
.then(readDataPool)
.then(pipeData)
.catch(error => console.log(error));
}
}
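For orientation, here is a minimal sketch of how this class might be driven from a test entry point. The file name, import paths, and runner wiring are assumptions rather than part of this commit; only the public methods and the "migrationCompleted" event named above come from the code itself.

import { EventEmitter } from 'events';
import Conversion from '../../src/Conversion';
// Hypothetical path; the actual location of this module is not shown in the diff.
import TestSchemaProcessor from './TestSchemaProcessor';

const testSchemaProcessor: TestSchemaProcessor = new TestSchemaProcessor();

testSchemaProcessor
    .initializeConversion()
    .then((conversion: Conversion) => {
        // "migrationCompleted" fires once the arranged migration finishes.
        (<EventEmitter>conversion._eventEmitter).on('migrationCompleted', async () => {
            // Assertions against the migrated schema would run here.
            await testSchemaProcessor.removeTestResources();
            process.exit(0);
        });
        return testSchemaProcessor.arrangeTestMigration(conversion);
    });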

25
tsconfig.json Normal file

@ -0,0 +1,25 @@
{
"compilerOptions": {
"removeComments": true,
"module": "commonjs",
"allowJs": true,
"target": "es2017",
"sourceMap": true,
"outDir": "./dist",
"strict": true,
"types": [
"node",
"pg",
"mysql"
]
},
"exclude": [
"node_modules"
],
"include": [
"src/**/*",
"test/**/*"
]
}
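Assuming the compiled output lands in "./dist" per the "outDir" option above, a plausible package.json "scripts" wiring is sketched below; the entry file names are assumptions and are not taken from this commit.

{
  "//": "Hypothetical wiring; the entry file names are assumptions.",
  "scripts": {
    "build": "tsc",
    "start": "node dist/src/Main.js",
    "test": "node dist/test/Main.test.js"
  }
}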