upgrade to version 2.2.0, see release notes for details
This commit is contained in:
parent
02cdc3817f
commit
d1aa593d4e
337
LICENSE.md
337
LICENSE.md
|
@ -1,16 +1,16 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
|
@ -19,35 +19,35 @@ GNU General Public License for most of our software; it applies also to
|
|||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
|
@ -58,49 +58,49 @@ products. If such problems arise substantially in other domains, we
|
|||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
|
@ -109,18 +109,18 @@ work under this License, and how to view a copy of this License. If
|
|||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
|
@ -131,7 +131,7 @@ implementation is available to the public in source code form. A
|
|||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
|
@ -144,16 +144,16 @@ linked subprograms that the work is specifically designed to require,
|
|||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
|
@ -161,7 +161,7 @@ covered work is covered by this License only if the output, given its
|
|||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
|
@ -172,19 +172,19 @@ for you must do so exclusively on your behalf, under your direction
|
|||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
|
@ -192,9 +192,9 @@ modification of the work as a means of enforcing, against the work's
|
|||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
|
@ -202,37 +202,37 @@ non-permissive terms added in accord with section 7 apply to the code;
|
|||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
|
@ -242,59 +242,59 @@ beyond what the individual works permit. Inclusion of a covered work
|
|||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
|
@ -307,7 +307,7 @@ is a consumer product regardless of whether the product has substantial
|
|||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
|
@ -315,7 +315,7 @@ suffice to ensure that the continued functioning of the modified object
|
|||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
|
@ -326,7 +326,7 @@ if neither you nor any third party retains the ability to install
|
|||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
|
@ -334,15 +334,15 @@ network may be denied when the modification itself materially and
|
|||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
|
@ -351,41 +351,41 @@ apply only to part of the Program, that part may be used separately
|
|||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
|
@ -395,46 +395,46 @@ License, you may add to a covered work material governed by the terms
|
|||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
|
@ -443,14 +443,14 @@ modify any covered work. These actions infringe copyright if you do
|
|||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
|
@ -460,7 +460,7 @@ give under the previous paragraph, plus a right to possession of the
|
|||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
|
@ -468,13 +468,13 @@ rights granted under this License, and you may not initiate litigation
|
|||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
|
@ -484,19 +484,19 @@ purposes of this definition, "control" includes the right to grant
|
|||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
|
@ -510,7 +510,7 @@ covered work in a country, or your recipient's use of the covered work
|
|||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
|
@ -518,7 +518,7 @@ or convey a specific copy of the covered work, then the patent license
|
|||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
|
@ -533,13 +533,13 @@ for and in connection with specific products or compilations that
|
|||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
|
@ -549,9 +549,9 @@ to collect a royalty for further conveying from those to whom you convey
|
|||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
|
@ -560,14 +560,14 @@ but the special requirements of the GNU Affero General Public License,
|
|||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
|
@ -576,19 +576,19 @@ Foundation. If the Program does not specify a version number of the
|
|||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
|
@ -597,9 +597,9 @@ PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
|||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
|
@ -609,14 +609,13 @@ PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
|||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
|
51
README.md
51
README.md
|
@ -6,16 +6,14 @@ from MySQL to PostgreSQL as easy and smooth as possible.</p>
|
|||
|
||||
<h3>KEY FEATURES</h3>
|
||||
<ul>
|
||||
<li> Ease of use - the only thing needed to run this app is the Node.js runtime.</li>
|
||||
<li> Accuracy of migration the database structure - NMIG converts
|
||||
<li> Precise migration of the database structure - NMIG converts
|
||||
MySQL data types to corresponding PostgreSQL data types, creates constraints,
|
||||
indexes, primary and foreign keys exactly as they were before migration.</li>
|
||||
|
||||
<li>Ability to migrate big databases - in order to reduce RAM consumption NMIG will split each table's data into several chunks. <br />
|
||||
Chunk size can be adjusted easily via configuration file.</li>
|
||||
<li>Ability to migrate big databases - in order to eliminate "process out of memory" issues NMIG will split each table's data into several chunks.<br>Each group of chunks will be loaded via separate worker process.</li>
|
||||
|
||||
<li> Speed of data transfer - in order to migrate data fast NMIG uses PostgreSQL COPY protocol.</li>
|
||||
|
||||
<li>Ability to recover migration process if disaster took place (without restarting from the beginning).</li>
|
||||
<li>Ease of monitoring - NMIG will provide detailed output about every step, it takes during the execution.</li>
|
||||
<li>
|
||||
Ease of configuration - all the parameters required for migration should be put in one single JSON document.
|
||||
|
@ -24,7 +22,7 @@ Chunk size can be adjusted easily via configuration file.</li>
|
|||
|
||||
<h3>SYSTEM REQUIREMENTS</h3>
|
||||
<ul>
|
||||
<li> <b>Node.js 5.x.x</b></li>
|
||||
<li> <b>Node.js 5 or greater</b></li>
|
||||
</ul>
|
||||
|
||||
<h3>USAGE</h3>
|
||||
|
@ -35,39 +33,34 @@ Chunk size can be adjusted easily via configuration file.</li>
|
|||
<b>Sample:</b> <code>/path/to/nmig</code></p>
|
||||
|
||||
<p><b>3.</b> Edit configuration file located at <code>/path/to/nmig/config.json</code> with correct details.<br /></p>
|
||||
<b>Remarks:</b>
|
||||
<b>Notes:</b>
|
||||
<ul>
|
||||
<li> config.json contains brief description of each configuration parameter</li>
|
||||
<li>Make sure, that username, you use in your PostgreSQL connection details, defined as superuser (usually "postgres")<br> More info: <a href="http://www.postgresql.org/docs/current/static/app-createuser.html">http://www.postgresql.org/docs/current/static/app-createuser.html</a></li>
|
||||
</ul>
|
||||
|
||||
<p><b>4.</b> Go to nmig directory, install dependencies, and run the app with <code>--expose-gc</code> flag<br />
|
||||
<p><b>4.</b> Go to nmig directory, install dependencies, and run the app<br />
|
||||
<b>Sample:</b><br />
|
||||
<pre>$ cd /path/to/nmig</pre><br />
|
||||
<pre>$ npm install</pre><br />
|
||||
<pre>$ node --expose-gc main.js</pre><br />
|
||||
</p>
|
||||
<p>
|
||||
|
||||
<b>Remark</b>: you can increase node.js memory limit (RAM usage) using <code>--max-old-space-size</code> flag<br />
|
||||
</p>
|
||||
<p>
|
||||
|
||||
Following command will increase memory limit to ~2GB and run nmig
|
||||
<br /> <code>$ node --max-old-space-size=2048 --expose-gc main.js</code>
|
||||
<pre>$ node nmig.js</pre><br />
|
||||
</p>
|
||||
|
||||
<p><b>5.</b> At the end of migration check log files, if necessary.<br />
|
||||
<p><b>5.</b> If a disaster took place during migration (for what ever reason) - simply restart the process
|
||||
<code>$ node nmig.js</code><br> NMIG will restart from the point it was stopped at.
|
||||
</p>
|
||||
|
||||
<p><b>6.</b> At the end of migration check log files, if necessary.<br />
|
||||
Log files will be located under "logs_directory" folder in the root of the package.<br />
|
||||
<b>Note:</b> "logs_directory" will be created during script execution.</p>
|
||||
|
||||
|
||||
<p><b>6.</b> In case of any remarks, misunderstandings or errors during migration,<br />
|
||||
<p><b>7.</b> In case of any remarks, misunderstandings or errors during migration,<br />
|
||||
please feel free to email me
|
||||
<a href="mailto:anatolyuss@gmail.com?subject=NMIG">anatolyuss@gmail.com</a></p>
|
||||
|
||||
<h3>VERSION</h3>
|
||||
<p>Current version is 1.1.1<br />
|
||||
<p>Current version is 2.1.0<br />
|
||||
(major version . improvements . bug fixes)</p>
|
||||
|
||||
|
||||
|
@ -78,18 +71,22 @@ which includes data types mapping, creation of tables, constraints, indexes, <br
|
|||
PKs, FKs, migration of data, garbage-collection (VACUUM) and analyzing the newly created <br />
|
||||
PostgreSQL database took 1 minute 18 seconds.</p>
|
||||
<p>
|
||||
<b>Remark:</b> it is highly recommended to VACUUM newly created database! <br />
|
||||
<b>Note:</b> it is highly recommended to VACUUM newly created database! <br />
|
||||
Just keep in mind, that VACUUM is a very time-consuming procedure. <br />
|
||||
So if you are short in time - disable VACUUM via config.json ("no_vacuum" parameter). <br />
|
||||
Such step will save you ~25% of migration time. <br />
|
||||
The migration process described above without VACUUM took 58 seconds only.
|
||||
</p>
|
||||
|
||||
<h3>LICENSE</h3>
|
||||
<p>NMIG is available under "GNU GENERAL PUBLIC LICENSE" (v. 3) <br />
|
||||
<a href="http://www.gnu.org/licenses/gpl.txt">http://www.gnu.org/licenses/gpl.txt.</a></p>
|
||||
|
||||
|
||||
<h3>REMARKS</h3>
|
||||
<p>Errors/Exceptions are not passed silently.<br />
|
||||
Any error will be immediately written into the error log file.</p>
|
||||
|
||||
<h3>KNOWN ISSUES</h3>
|
||||
<ul>
|
||||
<li>Empty strings in char/varchar columns may be interpreted as NULL.</li>
|
||||
</ul>
|
||||
|
||||
<h3>LICENSE</h3>
|
||||
<p>NMIG is available under "GNU GENERAL PUBLIC LICENSE" (v. 3) <br />
|
||||
<a href="http://www.gnu.org/licenses/gpl.txt">http://www.gnu.org/licenses/gpl.txt.</a></p>
|
||||
|
|
31
config.json
Normal file → Executable file
31
config.json
Normal file → Executable file
|
@ -37,12 +37,19 @@
|
|||
],
|
||||
"max_pool_size_target" : 10,
|
||||
|
||||
"pipe_width_description" : [
|
||||
"pipe_width_description" : [
|
||||
"Maximal amount of data-chunks, processed in a single 'DataPipe' iteration.",
|
||||
"Note: 'pipe_width' should not be greater than 'max_pool_size_target'."
|
||||
],
|
||||
"pipe_width" : 10,
|
||||
|
||||
"loader_max_old_space_size_description" : [
|
||||
"V8 memory limit of the loader process.",
|
||||
"Possible values are: any number, representing memory limit (in MB),",
|
||||
"or DEFAULT, representing V8 default limit for your current hardware."
|
||||
],
|
||||
"loader_max_old_space_size" : "DEFAULT",
|
||||
|
||||
"encoding_description" : [
|
||||
"JavaScript encoding type.",
|
||||
"If not supplied, then utf-8 will be used as a default."
|
||||
|
@ -56,18 +63,10 @@
|
|||
"schema" : "public",
|
||||
|
||||
"data_chunk_size_description" : [
|
||||
"During migration each table's data will be split into chunks of data_chunk_size (in KB).",
|
||||
"If not supplied, then 100 KB will be used as a default."
|
||||
"During migration each table's data will be split into chunks of data_chunk_size (in MB).",
|
||||
"If not supplied, then 1 MB will be used as a default."
|
||||
],
|
||||
"data_chunk_size" : 100,
|
||||
|
||||
"copy_only_description" : [
|
||||
"During migration each table's data will be split into chunks, and each chunk will be transmitted via COPY protocol.",
|
||||
"By default, if transmission via COPY fails then current chunk's data will be transmitted via INSERT statments, which may last considerably longer.",
|
||||
"This behavior can be disabled for tables, included into the list (Array) below."
|
||||
],
|
||||
"copy_only" : [],
|
||||
|
||||
"data_chunk_size" : 1,
|
||||
"no_vacuum_description" : [
|
||||
"PostgreSQL VACUUM reclaims storage occupied by dead tuples. VACUUM is a very time-consuming procedure.",
|
||||
"By default, VACUUM will be performed automatically after migration (recommended)",
|
||||
|
@ -79,6 +78,12 @@
|
|||
"List (Array) of tables, that will not be migrated.",
|
||||
"By default, nmig will migrate all tables"
|
||||
],
|
||||
"exclude_tables": []
|
||||
"exclude_tables": [],
|
||||
|
||||
"migrate_only_data_description" : [
|
||||
"In order to skip schema migration, and just migrate data into a preset schema",
|
||||
" - set this parameter true"
|
||||
],
|
||||
"migrate_only_data" : false
|
||||
}
|
||||
|
||||
|
|
69
migration/fmtp/CleanupProcessor.js
Normal file
69
migration/fmtp/CleanupProcessor.js
Normal file
|
@ -0,0 +1,69 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const directoriesManager = require('./DirectoriesManager');
|
||||
|
||||
/**
|
||||
* Closes DB connections.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function closeConnections(self) {
|
||||
return new Promise(resolve => {
|
||||
if (self._mysql) {
|
||||
self._mysql.end(error => {
|
||||
if (error) {
|
||||
log(self, '\t--[closeConnections] ' + error);
|
||||
}
|
||||
|
||||
log(self, '\t--[closeConnections] All DB connections to both MySQL and PostgreSQL servers have been closed...');
|
||||
self._pg = null;
|
||||
resolve();
|
||||
});
|
||||
} else {
|
||||
log(self, '\t--[closeConnections] All DB connections to both MySQL and PostgreSQL servers have been closed...');
|
||||
self._pg = null;
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes DB connections and removes the "./temporary_directory".
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self) {
|
||||
return new Promise(resolve => {
|
||||
log(self, '\t--[cleanup] Cleanup resources...');
|
||||
return directoriesManager.removeTemporaryDirectory(self).then(() => {
|
||||
return closeConnections(self);
|
||||
}).then(() => {
|
||||
log(self, '\t--[cleanup] Cleanup finished...');
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
};
|
|
@ -51,8 +51,8 @@ module.exports = function(arrTableColumns) {
|
|||
|| arrTableColumns[i].Type.indexOf('date') !== -1
|
||||
) {
|
||||
strRetVal += 'IF(`' + arrTableColumns[i].Field
|
||||
+ '` IN(\'0000-00-00\', \'0000-00-00 00:00:00\'), \'-INFINITY\', `'
|
||||
+ arrTableColumns[i].Field + '`),';
|
||||
+ '` IN(\'0000-00-00\', \'0000-00-00 00:00:00\'), \'-INFINITY\', CAST(`'
|
||||
+ arrTableColumns[i].Field + '` AS CHAR)),';
|
||||
} else {
|
||||
strRetVal += '`' + arrTableColumns[i].Field + '`,';
|
||||
}
|
||||
|
|
158
migration/fmtp/CommentsProcessor.js
Normal file
158
migration/fmtp/CommentsProcessor.js
Normal file
|
@ -0,0 +1,158 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const connect = require('./Connector');
|
||||
|
||||
/**
|
||||
* Create table comments.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function processTableComments(self, tableName) {
|
||||
return new Promise(resolve => {
|
||||
self._mysql.getConnection((error, connection) => {
|
||||
if (error) {
|
||||
// The connection is undefined.
|
||||
generateError(self, '\t--[processTableComments] Cannot connect to MySQL server...\n\t' + error);
|
||||
resolve();
|
||||
} else {
|
||||
let sql = "SELECT table_comment AS table_comment "
|
||||
+ "FROM information_schema.tables "
|
||||
+ "WHERE table_schema = '" + self._mySqlDbName + "' "
|
||||
+ "AND table_name = '" + tableName + "';";
|
||||
|
||||
connection.query(sql, (err, rows) => {
|
||||
connection.release();
|
||||
|
||||
if (err) {
|
||||
generateError(self, '\t--[processTableComments] ' + err, sql);
|
||||
resolve();
|
||||
} else {
|
||||
self._pg.connect((e, client, done) => {
|
||||
if (e) {
|
||||
let msg = '\t--[processTableComments] Cannot connect to PostgreSQL server...\n' + e;
|
||||
generateError(self, msg);
|
||||
resolve();
|
||||
} else {
|
||||
sql = 'COMMENT ON TABLE "' + self._schema + '"."' + tableName + '" IS ' + '\'' + rows[0].table_comment + '\';';
|
||||
|
||||
client.query(sql, queryError => {
|
||||
done();
|
||||
|
||||
if (queryError) {
|
||||
let msg = '\t--[processTableComments] Error while processing comment for "'
|
||||
+ self._schema + '"."' + tableName + '"...\n' + queryError;
|
||||
|
||||
generateError(self, msg, sql);
|
||||
resolve();
|
||||
} else {
|
||||
let success = '\t--[processTableComments] Successfully set comment for table "'
|
||||
+ self._schema + '"."' + tableName + '"';
|
||||
|
||||
log(self, success, self._dicTables[tableName].tableLogPath);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create columns comments.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function processColumnsComments(self, tableName) {
|
||||
return new Promise(resolve => {
|
||||
let arrCommentPromises = [];
|
||||
|
||||
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
|
||||
if (self._dicTables[tableName].arrTableColumns[i].Comment !== '') {
|
||||
arrCommentPromises.push(
|
||||
new Promise(resolveComment => {
|
||||
self._pg.connect((error, client, done) => {
|
||||
if (error) {
|
||||
let msg = '\t--[processColumnsComments] Cannot connect to PostgreSQL server...\n' + error;
|
||||
generateError(self, msg);
|
||||
resolveComment();
|
||||
} else {
|
||||
let sql = 'COMMENT ON COLUMN "' + self._schema + '"."' + tableName + '"."'
|
||||
+ self._dicTables[tableName].arrTableColumns[i].Field
|
||||
+ '" IS \'' + self._dicTables[tableName].arrTableColumns[i].Comment + '\';';
|
||||
|
||||
client.query(sql, err => {
|
||||
done();
|
||||
|
||||
if (err) {
|
||||
let msg = '\t--[processColumnsComments] Error while processing comment for "' + self._schema + '"."'
|
||||
+ tableName + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...\n' + err;
|
||||
|
||||
generateError(self, msg, sql);
|
||||
resolveComment();
|
||||
} else {
|
||||
let success = '\t--[processColumnsComments] Set comment for "' + self._schema + '"."' + tableName
|
||||
+ '" column: "' + self._dicTables[tableName].arrTableColumns[i].Field + '"...';
|
||||
|
||||
log(self, success, self._dicTables[tableName].tableLogPath);
|
||||
resolveComment();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Promise.all(arrCommentPromises).then(() => resolve());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate comments.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self, tableName) {
|
||||
return connect(self).then(() => {
|
||||
return new Promise(resolve => {
|
||||
let msg = '\t--[CommentsProcessor] Creates comments for table "' + self._schema + '"."' + tableName + '"...';
|
||||
log(self, msg, self._dicTables[tableName].tableLogPath);
|
||||
let tableCommentsPromise = processTableComments(self, tableName);
|
||||
let columnsCommentsPromise = processColumnsComments(self, tableName);
|
||||
Promise.all([tableCommentsPromise, columnsCommentsPromise]).then(() => resolve());
|
||||
});
|
||||
});
|
||||
};
|
85
migration/fmtp/Connector.js
Normal file
85
migration/fmtp/Connector.js
Normal file
|
@ -0,0 +1,85 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const mysql = require('mysql');
|
||||
const pg = require('pg');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const generateReport = require('./ReportGenerator');
|
||||
|
||||
/**
|
||||
* Check if both servers are connected.
|
||||
* If not, than create connections.
|
||||
* Kill current process if can not connect.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self) {
|
||||
return new Promise(resolve => {
|
||||
let mysqlConnectionPromise = new Promise((mysqlResolve, mysqlReject) => {
|
||||
if (!self._mysql) {
|
||||
self._sourceConString.connectionLimit = self._maxPoolSizeSource;
|
||||
let pool = mysql.createPool(self._sourceConString);
|
||||
|
||||
if (pool) {
|
||||
self._mysql = pool;
|
||||
mysqlResolve();
|
||||
} else {
|
||||
log(self, '\t--[connect] Cannot connect to MySQL server...');
|
||||
mysqlReject();
|
||||
}
|
||||
} else {
|
||||
mysqlResolve();
|
||||
}
|
||||
});
|
||||
|
||||
let pgConnectionPromise = new Promise((pgResolve, pgReject) => {
|
||||
if (!self._pg) {
|
||||
self._targetConString.max = self._maxPoolSizeTarget;
|
||||
let pool = new pg.Pool(self._targetConString);
|
||||
|
||||
if (pool) {
|
||||
self._pg = pool;
|
||||
|
||||
self._pg.on('error', error => {
|
||||
let message = 'Cannot connect to PostgreSQL server...\n' + error.message + '\n' + error.stack;
|
||||
generateError(self, message);
|
||||
generateReport(self, message);
|
||||
});
|
||||
|
||||
pgResolve();
|
||||
} else {
|
||||
log(self, '\t--[connect] Cannot connect to PostgreSQL server...');
|
||||
pgReject();
|
||||
}
|
||||
} else {
|
||||
pgResolve();
|
||||
}
|
||||
});
|
||||
|
||||
Promise.all([mysqlConnectionPromise, pgConnectionPromise]).then(
|
||||
() => resolve(),
|
||||
() => generateReport(self, 'NMIG just failed to establish db-connections.')
|
||||
);
|
||||
});
|
||||
};
|
108
migration/fmtp/ConsistencyEnforcer.js
Normal file
108
migration/fmtp/ConsistencyEnforcer.js
Normal file
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';

const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Update consistency state.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {Number} dataPoolId
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function updateConsistencyState(self, dataPoolId) {
    return new Promise(resolve => {
        self._pg.connect((error, client, done) => {
            if (error) {
                generateError(self, '\t--[ConsistencyEnforcer.updateConsistencyState] Cannot connect to PostgreSQL server...\n' + error);
                resolve();
            } else {
                // Table name convention "data_pool_{schema}{mySqlDbName}" must match
                // DataPoolManager.createDataPoolTable.
                let sql = 'UPDATE "' + self._schema + '"."data_pool_' + self._schema
                    + self._mySqlDbName + '" SET is_started = TRUE WHERE id = ' + dataPoolId + ';';

                client.query(sql, err => {
                    done(); // Return the client to the pool before reporting.

                    if (err) {
                        generateError(self, '\t--[ConsistencyEnforcer.updateConsistencyState] ' + err, sql);
                    }

                    // Best-effort: resolves even when the UPDATE failed.
                    resolve();
                });
            }
        });
    });
}
||||
|
||||
/**
|
||||
* Get consistency state.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {Number} dataPoolId
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function getConsistencyState(self, dataPoolId) {
    return new Promise(resolve => {
        self._pg.connect((error, client, done) => {
            if (error) {
                generateError(self, '\t--[ConsistencyEnforcer.getConsistencyState] Cannot connect to PostgreSQL server...\n' + error);
                // On any failure fall back to "not started" so the chunk is processed.
                resolve(false);
            } else {
                let sql = 'SELECT is_started AS is_started FROM "' + self._schema + '"."data_pool_' + self._schema
                    + self._mySqlDbName + '" WHERE id = ' + dataPoolId + ';';

                client.query(sql, (err, data) => {
                    done(); // Return the client to the pool before reporting.

                    if (err) {
                        generateError(self, '\t--[ConsistencyEnforcer.getConsistencyState] ' + err, sql);
                        resolve(false);
                    } else {
                        // NOTE(review): assumes the id always exists (data.rows is non-empty);
                        // a missing row would throw here - verify against the caller.
                        resolve(data.rows[0].is_started);
                    }
                });
            }
        });
    });
}
||||
|
||||
/**
|
||||
* Enforce consistency before processing a chunk of data.
|
||||
* Ensure there are no any data duplications.
|
||||
* In case of normal execution - it is a good practice.
|
||||
* In case of rerunning nmig after unexpected failure - it is absolutely mandatory.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {Number} chunkId
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self, chunkId) {
|
||||
return new Promise(resolve => {
|
||||
getConsistencyState(self, chunkId).then(isStarted => {
|
||||
if (isStarted) {
|
||||
// Current data chunk runs after a disaster recovery.
|
||||
resolve(false);
|
||||
} else {
|
||||
// Normal migration flow.
|
||||
updateConsistencyState(self, chunkId).then(() => resolve(true));
|
||||
}
|
||||
})
|
||||
});
|
||||
};
|
83
migration/fmtp/Conversion.js
Normal file
83
migration/fmtp/Conversion.js
Normal file
|
@ -0,0 +1,83 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
 * This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
 * the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
 * This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
 * If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const isIntNumeric = require('./IntegerValidator');
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param {Object} config
|
||||
*/
|
||||
module.exports = function Conversion(config) {
|
||||
this._config = config;
|
||||
this._sourceConString = this._config.source;
|
||||
this._targetConString = this._config.target;
|
||||
this._tempDirPath = this._config.tempDirPath;
|
||||
this._logsDirPath = this._config.logsDirPath;
|
||||
this._dataTypesMapAddr = __dirname + '/DataTypesMap.json';
|
||||
this._allLogsPath = this._logsDirPath + '/all.log';
|
||||
this._errorLogsPath = this._logsDirPath + '/errors-only.log';
|
||||
this._notCreatedViewsPath = this._logsDirPath + '/not_created_views';
|
||||
this._noVacuum = this._config.no_vacuum;
|
||||
this._excludeTables = this._config.exclude_tables;
|
||||
this._timeBegin = new Date();
|
||||
this._encoding = this._config.encoding === undefined ? 'utf8' : this._config.encoding;
|
||||
this._dataChunkSize = this._config.data_chunk_size === undefined ? 1 : +this._config.data_chunk_size;
|
||||
this._dataChunkSize = this._dataChunkSize < 1 ? 1 : this._dataChunkSize;
|
||||
this._0777 = '0777';
|
||||
this._mysql = null;
|
||||
this._pg = null;
|
||||
this._tablesToMigrate = [];
|
||||
this._viewsToMigrate = [];
|
||||
this._tablesCnt = 0;
|
||||
this._viewsCnt = 0;
|
||||
this._dataPool = [];
|
||||
this._dicTables = Object.create(null);
|
||||
this._mySqlDbName = this._sourceConString.database;
|
||||
this._schema = this._config.schema === undefined ||
|
||||
this._config.schema === ''
|
||||
? this._mySqlDbName
|
||||
: this._config.schema;
|
||||
|
||||
this._maxPoolSizeSource = this._config.max_pool_size_source !== undefined &&
|
||||
isIntNumeric(this._config.max_pool_size_source)
|
||||
? +this._config.max_pool_size_source
|
||||
: 10;
|
||||
|
||||
this._maxPoolSizeTarget = this._config.max_pool_size_target !== undefined &&
|
||||
isIntNumeric(this._config.max_pool_size_target)
|
||||
? +this._config.max_pool_size_target
|
||||
: 10;
|
||||
|
||||
this._maxPoolSizeSource = this._maxPoolSizeSource > 0 ? this._maxPoolSizeSource : 10;
|
||||
this._maxPoolSizeTarget = this._maxPoolSizeTarget > 0 ? this._maxPoolSizeTarget : 10;
|
||||
|
||||
this._pipeWidth = this._config.pipe_width !== undefined &&
|
||||
isIntNumeric(this._config.pipe_width)
|
||||
? +this._config.pipe_width
|
||||
: this._maxPoolSizeTarget;
|
||||
|
||||
this._pipeWidth = this._pipeWidth > this._maxPoolSizeTarget ? this._maxPoolSizeTarget : this._pipeWidth;
|
||||
this._loaderMaxOldSpaceSize = this._config.loader_max_old_space_size;
|
||||
this._loaderMaxOldSpaceSize = isIntNumeric(this._loaderMaxOldSpaceSize) ? this._loaderMaxOldSpaceSize : 'DEFAULT';
|
||||
this._migrateOnlyData = this._config.migrate_only_data;
|
||||
};
|
124
migration/fmtp/DataChunksProcessor.js
Normal file
124
migration/fmtp/DataChunksProcessor.js
Normal file
|
@ -0,0 +1,124 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const arrangeColumnsData = require('./ColumnsDataArranger');
|
||||
|
||||
/**
|
||||
* Prepares an array of tables and chunk offsets.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @param {Boolean} haveDataChunksProcessed
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self, tableName, haveDataChunksProcessed) {
    return connect(self).then(() => {
        return new Promise(resolve => {
            // Chunks were already prepared in a previous run - nothing to do.
            if (haveDataChunksProcessed) {
                return resolve();
            }

            self._mysql.getConnection((error, connection) => {
                if (error) {
                    // The connection is undefined.
                    generateError(self, '\t--[prepareDataChunks] Cannot connect to MySQL server...\n\t' + error);
                    resolve();
                } else {
                    // Determine current table size, apply "chunking".
                    let sql = "SELECT ((data_length + index_length) / 1024 / 1024) AS size_in_mb "
                        + "FROM information_schema.tables "
                        + "WHERE table_schema = '" + self._mySqlDbName + "' "
                        + "AND table_name = '" + tableName + "';";

                    connection.query(sql, (err, rows) => {
                        if (err) {
                            connection.release();
                            generateError(self, '\t--[prepareDataChunks] ' + err, sql);
                            resolve();
                        } else {
                            // Treat tables smaller than 1 MB as exactly 1 MB.
                            let tableSizeInMb = +rows[0].size_in_mb;
                            tableSizeInMb = tableSizeInMb < 1 ? 1 : tableSizeInMb;
                            rows = null;
                            let strSelectFieldList = arrangeColumnsData(self._dicTables[tableName].arrTableColumns);
                            sql = 'SELECT COUNT(1) AS rows_count FROM `' + tableName + '`;';

                            connection.query(sql, (err2, rows2) => {
                                connection.release();

                                if (err2) {
                                    generateError(self, '\t--[prepareDataChunks] ' + err2, sql);
                                    resolve();
                                } else {
                                    // Split the table into roughly self._dataChunkSize MB chunks.
                                    let rowsCnt = rows2[0].rows_count;
                                    rows2 = null;
                                    let chunksCnt = tableSizeInMb / self._dataChunkSize;
                                    chunksCnt = chunksCnt < 1 ? 1 : chunksCnt;
                                    let rowsInChunk = Math.ceil(rowsCnt / chunksCnt);
                                    let arrDataPoolPromises = [];
                                    let msg = '\t--[prepareDataChunks] Total rows to insert into '
                                        + '"' + self._schema + '"."' + tableName + '": ' + rowsCnt;

                                    log(self, msg, self._dicTables[tableName].tableLogPath);

                                    // Persist one chunk descriptor per offset into the data-pool table,
                                    // so that a crashed run can be resumed from the pool.
                                    for (let offset = 0; offset < rowsCnt; offset += rowsInChunk) {
                                        arrDataPoolPromises.push(new Promise(resolveDataUnit => {
                                            self._pg.connect((error, client, done) => {
                                                if (error) {
                                                    generateError(self, '\t--[prepareDataChunks] Cannot connect to PostgreSQL server...\n' + error);
                                                    resolveDataUnit();
                                                } else {
                                                    // Chunk descriptor consumed later by DataLoader.
                                                    let strJson = '{"_tableName":"' + tableName
                                                        + '","_selectFieldList":"' + strSelectFieldList + '",'
                                                        + '"_offset":' + offset + ','
                                                        + '"_rowsInChunk":' + rowsInChunk + ','
                                                        + '"_rowsCnt":' + rowsCnt + '}';

                                                    let sql = 'INSERT INTO "' + self._schema + '"."data_pool_' + self._schema
                                                        + self._mySqlDbName + '"("is_started", "json") VALUES(FALSE, $1);';

                                                    client.query(sql, [strJson], err => {
                                                        done(); // Return the client to the pool.

                                                        if (err) {
                                                            generateError(self, '\t--[prepareDataChunks] INSERT failed...\n' + err, sql);
                                                        }

                                                        resolveDataUnit();
                                                    });
                                                }
                                            });
                                        }));
                                    }

                                    Promise.all(arrDataPoolPromises).then(() => resolve());
                                }
                            });
                        }
                    });
                }
            });
        });
    });
};
|
251
migration/fmtp/DataLoader.js
Normal file
251
migration/fmtp/DataLoader.js
Normal file
|
@ -0,0 +1,251 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const pgCopyStreams = require('pg-copy-streams');
|
||||
const csvStringify = require('./CsvStringifyModified');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const connect = require('./Connector');
|
||||
const Conversion = require('./Conversion');
|
||||
const MessageToMaster = require('./MessageToMaster');
|
||||
const enforceConsistency = require('./ConsistencyEnforcer');
|
||||
const copyFrom = pgCopyStreams.from;
|
||||
|
||||
// Conversion instance for this worker process; set when the master sends the config.
let self = null;

process.on('message', signal => {
    self = new Conversion(signal.config);
    let promises = [];
    log(self, '\t--[loadData] Loading the data...');

    for (let i = 0; i < signal.chunks.length; ++i) {
        promises.push(
            connect(self).then(() => {
                // Resolves to true for normal flow, false when the chunk was
                // already started before a crash (possible duplication).
                return enforceConsistency(self, signal.chunks[i]._id);
            }).then(isNormalFlow => {
                if (isNormalFlow) {
                    return populateTableWorker(
                        signal.chunks[i]._tableName,
                        signal.chunks[i]._selectFieldList,
                        signal.chunks[i]._offset,
                        signal.chunks[i]._rowsInChunk,
                        signal.chunks[i]._rowsCnt,
                        signal.chunks[i]._id
                    );
                }

                // Possible duplication: warn the operator with the exact MySQL
                // query for the suspect data, then drop the chunk from the pool
                // instead of loading it again.
                let sql = buildChunkQuery(
                    signal.chunks[i]._tableName,
                    signal.chunks[i]._selectFieldList,
                    signal.chunks[i]._offset,
                    signal.chunks[i]._rowsInChunk
                );

                let strTwelveSpaces = ' '; // NOTE(review): the name suggests 12 spaces; this copy of the source may have collapsed the literal - verify upstream.
                let rejectedData = '\n\t--[loadData] Possible data duplication alert!\n\t ' + strTwelveSpaces
                    + 'Data, retrievable by following MySQL query:\n' + sql + '\n\t ' + strTwelveSpaces
                    + 'may already be migrated.\n\t' + strTwelveSpaces + ' Please, check it.';

                log(self, rejectedData, self._logsDirPath + '/' + signal.chunks[i]._tableName + '.log');
                return deleteChunk(signal.chunks[i]._id);
            })
        );
    }

    // Notify the master once every chunk assigned to this worker is handled.
    Promise.all(promises).then(() => process.send('processed'));
});
|
||||
|
||||
/**
|
||||
* Delete given record from the data-pool.
|
||||
*
|
||||
* @param {Number} dataPoolId
|
||||
* @param {Node-pg client|undefined} client
|
||||
* @param {Function|undefined} done
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function deleteChunk(dataPoolId, client, done) {
    return new Promise(resolve => {
        // The DELETE statement is identical in both branches; build it once.
        const sql = 'DELETE FROM "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '" ' + 'WHERE id = ' + dataPoolId + ';';

        // Runs the DELETE on the given client, releases it, logs any error and resolves.
        // Errors are logged but never rejected: chunk cleanup is best-effort.
        const runDelete = (pgClient, release) => {
            pgClient.query(sql, err => {
                release();

                if (err) {
                    generateError(self, '\t--[deleteChunk] ' + err, sql);
                }

                resolve();
            });
        };

        if (client) {
            // Re-use the client (and its "done" callback) handed over by the caller.
            runDelete(client, done);
        } else {
            // No client supplied - check one out of the pool.
            self._pg.connect((error, pgClient, pgDone) => {
                if (error) {
                    generateError(self, '\t--[deleteChunk] Cannot connect to PostgreSQL server...\n' + error);
                    resolve();
                } else {
                    runDelete(pgClient, pgDone);
                }
            });
        }
    });
}
|
||||
|
||||
/**
|
||||
* Delete given csv file.
|
||||
*
|
||||
* @param {String} csvAddr
|
||||
* @param {FileDescriptor} fd
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function deleteCsv(csvAddr, fd) {
    // Remove the temporary csv file, then release its file descriptor.
    // Both steps are best-effort: their errors are intentionally ignored.
    const unlinked = new Promise(res => fs.unlink(csvAddr, () => res()));
    return unlinked.then(() => new Promise(res => fs.close(fd, () => res())));
}
|
||||
|
||||
/**
|
||||
* Build a MySQL query to retrieve the chunk of data.
|
||||
*
|
||||
* @param {String} tableName
|
||||
* @param {String} strSelectFieldList
|
||||
* @param {Number} offset
|
||||
* @param {Number} rowsInChunk
|
||||
* @returns {String}
|
||||
*/
|
||||
function buildChunkQuery(tableName, strSelectFieldList, offset, rowsInChunk) {
    // "LIMIT <offset>,<count>" is the MySQL-specific pagination form.
    return `SELECT ${strSelectFieldList} FROM \`${tableName}\` LIMIT ${offset},${rowsInChunk};`;
}
|
||||
|
||||
/**
|
||||
* Load a chunk of data using "PostgreSQL COPY".
|
||||
*
|
||||
* @param {String} tableName
|
||||
* @param {String} strSelectFieldList
|
||||
* @param {Number} offset
|
||||
* @param {Number} rowsInChunk
|
||||
* @param {Number} rowsCnt
|
||||
* @param {Number} dataPoolId
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function populateTableWorker(tableName, strSelectFieldList, offset, rowsInChunk, rowsCnt, dataPoolId) {
    return new Promise(resolvePopulateTableWorker => {
        self._mysql.getConnection((error, connection) => {
            if (error) {
                // The connection is undefined.
                generateError(self, '\t--[populateTableWorker] Cannot connect to MySQL server...\n\t' + error);
                resolvePopulateTableWorker();
            } else {
                let csvAddr = self._tempDirPath + '/' + tableName + offset + '.csv';
                let sql = buildChunkQuery(tableName, strSelectFieldList, offset, rowsInChunk);

                connection.query(sql, (err, rows) => {
                    connection.release();

                    if (err) {
                        generateError(self, '\t--[populateTableWorker] ' + err, sql);
                        resolvePopulateTableWorker();
                    } else {
                        // The final chunk may be shorter than requested.
                        rowsInChunk = rows.length;

                        csvStringify(rows, (csvError, csvString) => {
                            rows = null; // Free the row set as soon as possible.

                            if (csvError) {
                                generateError(self, '\t--[populateTableWorker] ' + csvError);
                                resolvePopulateTableWorker();
                            } else {
                                // Buffer.from() replaces the deprecated, unsafe "new Buffer()" constructor.
                                let buffer = Buffer.from(csvString, self._encoding);
                                csvString = null;

                                fs.open(csvAddr, 'w', self._0777, (csvErrorFputcsvOpen, fd) => {
                                    if (csvErrorFputcsvOpen) {
                                        buffer = null;
                                        generateError(self, '\t--[populateTableWorker] ' + csvErrorFputcsvOpen);
                                        resolvePopulateTableWorker();
                                    } else {
                                        fs.write(fd, buffer, 0, buffer.length, null, csvErrorFputcsvWrite => {
                                            buffer = null;

                                            if (csvErrorFputcsvWrite) {
                                                generateError(self, '\t--[populateTableWorker] ' + csvErrorFputcsvWrite);
                                                resolvePopulateTableWorker();
                                            } else {
                                                // Renamed from "error" to avoid shadowing the outer parameter.
                                                self._pg.connect((pgError, client, done) => {
                                                    if (pgError) {
                                                        generateError(self, '\t--[populateTableWorker] Cannot connect to PostgreSQL server...\n' + pgError, sql);
                                                        deleteCsv(csvAddr, fd).then(() => resolvePopulateTableWorker());
                                                    } else {
                                                        // Stream the csv file into PostgreSQL via COPY ... FROM STDIN.
                                                        let sqlCopy = 'COPY "' + self._schema + '"."' + tableName + '" FROM STDIN DELIMITER \'' + ',\'' + ' CSV;';
                                                        let stream = client.query(copyFrom(sqlCopy));
                                                        let readStream = fs.createReadStream(csvAddr);

                                                        readStream.on('end', () => {
                                                            /*
                                                             * COPY FROM STDIN does not return the number of rows inserted.
                                                             * But the transactional behavior still applies (no records inserted if at least one failed).
                                                             * That is why in case of 'on end' the rowsInChunk value is actually the number of records inserted.
                                                             */
                                                            process.send(new MessageToMaster(tableName, rowsInChunk, rowsCnt));
                                                            deleteChunk(dataPoolId, client, done).then(() => {
                                                                deleteCsv(csvAddr, fd).then(() => resolvePopulateTableWorker());
                                                            });
                                                        });

                                                        readStream.on('error', readStreamError => {
                                                            generateError(self, '\t--[populateTableWorker] ' + readStreamError, sqlCopy);
                                                            let rejectedData = '\t--[populateTableWorker] Error loading table data:\n' + sql + '\n';
                                                            log(self, rejectedData, self._logsDirPath + '/' + tableName + '.log');
                                                            // The chunk is removed even on failure; the rejected query is logged above for manual replay.
                                                            deleteChunk(dataPoolId, client, done).then(() => {
                                                                deleteCsv(csvAddr, fd).then(() => resolvePopulateTableWorker());
                                                            });
                                                        });

                                                        readStream.pipe(stream);
                                                    }
                                                });
                                            }
                                        });
                                    }
                                });
                            }
                        });
                    }
                });
            }
        });
    });
}
|
129
migration/fmtp/DataPoolManager.js
Normal file
129
migration/fmtp/DataPoolManager.js
Normal file
|
@ -0,0 +1,129 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Create the "{schema}"."data_pool_{self._schema + self._mySqlDbName} temporary table."
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports.createDataPoolTable = function(self) {
    return connect(self).then(() => {
        return new Promise((resolve, reject) => {
            self._pg.connect((error, client, done) => {
                if (error) {
                    generateError(self, '\t--[DataPoolManager.createDataPoolTable] Cannot connect to PostgreSQL server...\n' + error);
                    reject();
                } else {
                    // "IF NOT EXISTS" keeps this idempotent across re-runs after a failure.
                    let sql = 'CREATE TABLE IF NOT EXISTS "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName
                        + '"("id" BIGSERIAL, "json" TEXT, "is_started" BOOLEAN);';

                    client.query(sql, err => {
                        done(); // Return the client to the pool.

                        if (err) {
                            generateError(self, '\t--[DataPoolManager.createDataPoolTable] ' + err, sql);
                            reject();
                        } else {
                            log(self, '\t--[DataPoolManager.createDataPoolTable] table "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '" is created...');
                            resolve();
                        }
                    });
                }
            });
        });
    });
};
|
||||
|
||||
/**
|
||||
* Drop the "{schema}"."data_pool_{self._schema + self._mySqlDbName} temporary table."
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports.dropDataPoolTable = function(self) {
    return connect(self).then(() => {
        // Best-effort cleanup: resolves in every case, even on failure.
        return new Promise(resolve => {
            self._pg.connect((error, client, done) => {
                if (error) {
                    generateError(self, '\t--[DataPoolManager.dropDataPoolTable] Cannot connect to PostgreSQL server...\n' + error);
                    resolve();
                } else {
                    let sql = 'DROP TABLE "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '";';
                    client.query(sql, err => {
                        done(); // Return the client to the pool.

                        if (err) {
                            generateError(self, '\t--[DataPoolManager.dropDataPoolTable] ' + err, sql);
                        } else {
                            log(self, '\t--[DataPoolManager.dropDataPoolTable] table "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '" is dropped...');
                        }

                        resolve();
                    });
                }
            });
        });
    });
};
|
||||
|
||||
/**
|
||||
* Reads temporary table, and generates Data-pool.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports.readDataPool = function(self) {
    return connect(self).then(() => {
        return new Promise((resolve, reject) => {
            self._pg.connect((error, client, done) => {
                if (error) {
                    generateError(self, '\t--[DataPoolManager.readDataPool] Cannot connect to PostgreSQL server...\n' + error);
                    reject();
                } else {
                    let sql = 'SELECT id AS id, json AS json FROM "' + self._schema + '"."data_pool_' + self._schema + self._mySqlDbName + '";';
                    client.query(sql, (err, arrDataPool) => {
                        done(); // Return the client to the pool.

                        if (err) {
                            generateError(self, '\t--[DataPoolManager.readDataPool] ' + err, sql);
                            return reject();
                        }

                        // Each row's "json" column holds a serialized chunk descriptor
                        // (written by DataChunksProcessor); attach the record id so the
                        // chunk can be flagged/deleted later.
                        for (let i = 0; i < arrDataPool.rows.length; ++i) {
                            let obj = JSON.parse(arrDataPool.rows[i].json);
                            obj._id = arrDataPool.rows[i].id;
                            self._dataPool.push(obj);
                        }

                        log(self, '\t--[DataPoolManager.readDataPool] Data-Pool is loaded...');
                        resolve();
                    });
                }
            });
        });
    });
};
|
|
@ -49,7 +49,7 @@
|
|||
},
|
||||
|
||||
"bigint" : {
|
||||
"increased_size" : "numeric",
|
||||
"increased_size" : "bigint",
|
||||
"type" : "bigint",
|
||||
"mySqlVarLenPgSqlFixedLen" : true
|
||||
},
|
||||
|
@ -77,7 +77,7 @@
|
|||
"type" : "geometry",
|
||||
"mySqlVarLenPgSqlFixedLen" : false
|
||||
},
|
||||
|
||||
|
||||
"numeric" : {
|
||||
"increased_size" : "",
|
||||
"type" : "numeric",
|
||||
|
@ -156,6 +156,12 @@
|
|||
"mySqlVarLenPgSqlFixedLen" : false
|
||||
},
|
||||
|
||||
"set" : {
|
||||
"increased_size" : "",
|
||||
"type" : "character varying(255)",
|
||||
"mySqlVarLenPgSqlFixedLen" : false
|
||||
},
|
||||
|
||||
"tinytext" : {
|
||||
"increased_size" : "",
|
||||
"type" : "text",
|
||||
|
|
50
migration/fmtp/DataTypesMapReader.js
Normal file
50
migration/fmtp/DataTypesMapReader.js
Normal file
|
@ -0,0 +1,50 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
/**
|
||||
* Reads "./DataTypesMap.json" and converts its json content to js object.
|
||||
* Appends this object to "FromMySQL2PostgreSQL" instance.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readFile(self._dataTypesMapAddr, (error, data) => {
|
||||
if (error) {
|
||||
console.log('\t--[readDataTypesMap] Cannot read "DataTypesMap" from ' + self._dataTypesMapAddr);
|
||||
reject();
|
||||
} else {
|
||||
try {
|
||||
self._dataTypesMap = JSON.parse(data.toString());
|
||||
console.log('\t--[readDataTypesMap] Data Types Map is loaded...');
|
||||
resolve();
|
||||
} catch (err) {
|
||||
console.log('\t--[readDataTypesMap] Cannot parse JSON from' + self._dataTypesMapAddr);
|
||||
reject();
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
114
migration/fmtp/DefaultProcessor.js
Normal file
114
migration/fmtp/DefaultProcessor.js
Normal file
|
@ -0,0 +1,114 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Checks if given value is float number.
|
||||
*
|
||||
* @param {String|Number} value
|
||||
* @returns {Boolean}
|
||||
*/
|
||||
function isFloatNumeric(value) {
    // A value counts as "float numeric" when parseFloat() yields a real number
    // AND the whole value coerces to a finite number (so '12abc' fails).
    const parsed = parseFloat(value);
    return !Number.isNaN(parsed) && isFinite(value);
}
|
||||
|
||||
/**
|
||||
* Define which columns of the given table have default value.
|
||||
* Set default values, if need.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self, tableName) {
    return connect(self).then(() => {
        return new Promise(resolve => {
            log(self, '\t--[processDefault] Defines default values for table: "' + self._schema + '"."' + tableName + '"', self._dicTables[tableName].tableLogPath);
            let processDefaultPromises = [];
            // Maps MySQL default-value tokens to PostgreSQL equivalents.
            // MySQL "zero dates" become '-INFINITY'; the UTC_* functions are
            // emulated with "AT TIME ZONE 'UTC'" expressions.
            let sqlReservedValues = {
                'CURRENT_DATE' : 'CURRENT_DATE',
                '0000-00-00' : "'-INFINITY'",
                'CURRENT_TIME' : 'CURRENT_TIME',
                '00:00:00' : '00:00:00',
                'CURRENT_TIMESTAMP' : 'CURRENT_TIMESTAMP',
                '0000-00-00 00:00:00' : "'-INFINITY'",
                'LOCALTIME' : 'LOCALTIME',
                'LOCALTIMESTAMP' : 'LOCALTIMESTAMP',
                'NULL' : 'NULL',
                'UTC_DATE' : "(CURRENT_DATE AT TIME ZONE 'UTC')",
                'UTC_TIME' : "(CURRENT_TIME AT TIME ZONE 'UTC')",
                'UTC_TIMESTAMP' : "(NOW() AT TIME ZONE 'UTC')"
            };

            for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
                // Only columns that declared a MySQL default value are processed.
                if (self._dicTables[tableName].arrTableColumns[i].Default) {
                    processDefaultPromises.push(
                        new Promise(resolveProcessDefault => {
                            self._pg.connect((error, client, done) => {
                                if (error) {
                                    let msg = '\t--[processDefault] Cannot connect to PostgreSQL server...\n' + error;
                                    generateError(self, msg);
                                    resolveProcessDefault();
                                } else {
                                    let sql = 'ALTER TABLE "' + self._schema + '"."' + tableName
                                        + '" ' + 'ALTER COLUMN "' + self._dicTables[tableName].arrTableColumns[i].Field + '" SET DEFAULT ';

                                    if (sqlReservedValues[self._dicTables[tableName].arrTableColumns[i].Default]) {
                                        // Reserved token: use the mapped PostgreSQL expression verbatim.
                                        sql += sqlReservedValues[self._dicTables[tableName].arrTableColumns[i].Default] + ';';
                                    } else {
                                        // Numeric defaults are emitted as-is; anything else is single-quoted.
                                        sql += isFloatNumeric(self._dicTables[tableName].arrTableColumns[i].Default)
                                            ? self._dicTables[tableName].arrTableColumns[i].Default + ';'
                                            : "'" + self._dicTables[tableName].arrTableColumns[i].Default + "';";
                                    }

                                    client.query(sql, err => {
                                        done(); // Return the client to the pool.

                                        if (err) {
                                            let msg = '\t--[processDefault] Error occurred when tried to set default value for "'
                                                + self._schema + '"."' + tableName
                                                + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...\n' + err;

                                            generateError(self, msg, sql);
                                            resolveProcessDefault();
                                        } else {
                                            let success = '\t--[processDefault] Set default value for "' + self._schema + '"."' + tableName
                                                + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...';

                                            log(self, success, self._dicTables[tableName].tableLogPath);
                                            resolveProcessDefault();
                                        }
                                    });
                                }
                            });
                        })
                    );
                }
            }

            Promise.all(processDefaultPromises).then(() => resolve());
        });
    });
};
|
137
migration/fmtp/DirectoriesManager.js
Normal file
137
migration/fmtp/DirectoriesManager.js
Normal file
|
@ -0,0 +1,137 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const log = require('./Logger');
|
||||
|
||||
/**
 * Creates temporary directory.
 *
 * @param {Conversion} self
 * @returns {Promise}
 */
module.exports.createTemporaryDirectory = function(self) {
    return new Promise((resolve, reject) => {
        log(self, '\t--[DirectoriesManager.createTemporaryDirectory] Creating temporary directory...');
        fs.stat(self._tempDirPath, (directoryDoesNotExist, stat) => {
            if (!directoryDoesNotExist && stat.isDirectory()) {
                // The directory is already in place - nothing to do.
                resolve();
            } else if (!directoryDoesNotExist) {
                // The path exists, but it is not a directory.
                log(self, '\t--[DirectoriesManager.createTemporaryDirectory] Cannot perform a migration due to unexpected error');
                reject();
            } else {
                fs.mkdir(self._tempDirPath, self._0777, e => {
                    if (e) {
                        let msg = '\t--[DirectoriesManager.createTemporaryDirectory] Cannot perform a migration due to impossibility to create '
                                + '"temporary_directory": ' + self._tempDirPath;

                        log(self, msg);
                        reject();
                    } else {
                        log(self, '\t--[DirectoriesManager.createTemporaryDirectory] Temporary directory is created...');
                        resolve();
                    }
                });
            }
        });
    });
};
|
||||
|
||||
/**
 * Removes temporary directory.
 *
 * @param {Conversion} self
 * @returns {Promise}
 */
module.exports.removeTemporaryDirectory = function(self) {
    return new Promise(resolve => {
        fs.readdir(self._tempDirPath, (err, arrContents) => {
            if (err) {
                let msg = '\t--[DirectoriesManager.removeTemporaryDirectory] Note, TemporaryDirectory located at "'
                        + self._tempDirPath + '" is not removed \n\t--[DirectoriesManager.removeTemporaryDirectory] ' + err;

                log(self, msg);
                resolve();
                return;
            }

            // Unlink every file first: rmdir only succeeds on an empty directory.
            // Individual unlink failures are deliberately ignored - rmdir below
            // will surface the problem.
            const unlinkPromises = arrContents.map(fileName => new Promise(resolveUnlink => {
                fs.unlink(self._tempDirPath + '/' + fileName, () => resolveUnlink());
            }));

            Promise.all(unlinkPromises).then(() => {
                fs.rmdir(self._tempDirPath, error => {
                    const msg = error
                        ? '\t--[DirectoriesManager.removeTemporaryDirectory] Note, TemporaryDirectory located at "'
                            + self._tempDirPath + '" is not removed \n\t--[DirectoriesManager.removeTemporaryDirectory] ' + error
                        : '\t--[DirectoriesManager.removeTemporaryDirectory] TemporaryDirectory located at "'
                            + self._tempDirPath + '" is removed';

                    // Removal is best-effort: the promise resolves either way.
                    log(self, msg);
                    resolve();
                });
            });
        });
    });
};
|
||||
|
||||
/**
 * Creates logs directory.
 *
 * @param {Conversion} self
 * @returns {Promise}
 */
module.exports.createLogsDirectory = function(self) {
    return new Promise((resolve, reject) => {
        // console.log is used (instead of the log module) until the logs
        // directory is known to exist.
        console.log('\t--[DirectoriesManager.createLogsDirectory] Creating logs directory...');
        fs.stat(self._logsDirPath, (directoryDoesNotExist, stat) => {
            if (!directoryDoesNotExist && stat.isDirectory()) {
                log(self, '\t--[DirectoriesManager.createLogsDirectory] Logs directory already exists...');
                resolve();
            } else if (!directoryDoesNotExist) {
                // The path exists, but it is not a directory.
                console.log('\t--[DirectoriesManager.createLogsDirectory] Cannot perform a migration due to unexpected error');
                reject();
            } else {
                fs.mkdir(self._logsDirPath, self._0777, e => {
                    if (e) {
                        let msg = '\t--[DirectoriesManager.createLogsDirectory] Cannot perform a migration due to impossibility to create '
                                + '"logs_directory": ' + self._logsDirPath;

                        console.log(msg);
                        reject();
                    } else {
                        log(self, '\t--[DirectoriesManager.createLogsDirectory] Logs directory is created...');
                        resolve();
                    }
                });
            }
        });
    });
};
|
85
migration/fmtp/EnumProcessor.js
Normal file
85
migration/fmtp/EnumProcessor.js
Normal file
|
@ -0,0 +1,85 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Define which columns of the given table are of type "enum".
|
||||
* Set an appropriate constraint, if need.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self, tableName) {
|
||||
return connect(self).then(() => {
|
||||
return new Promise(resolve => {
|
||||
log(self, '\t--[processEnum] Defines "ENUMs" for table "' + self._schema + '"."' + tableName + '"', self._dicTables[tableName].tableLogPath);
|
||||
let processEnumPromises = [];
|
||||
|
||||
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
|
||||
if (self._dicTables[tableName].arrTableColumns[i].Type.indexOf('(') !== -1) {
|
||||
let arrType = self._dicTables[tableName].arrTableColumns[i].Type.split('(');
|
||||
|
||||
if (arrType[0] === 'enum') {
|
||||
processEnumPromises.push(
|
||||
new Promise(resolveProcessEnum => {
|
||||
self._pg.connect((error, client, done) => {
|
||||
if (error) {
|
||||
let msg = '\t--[processEnum] Cannot connect to PostgreSQL server...\n' + error;
|
||||
generateError(self, msg);
|
||||
resolveProcessEnum();
|
||||
} else {
|
||||
let sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" '
|
||||
+ 'ADD CHECK ("' + self._dicTables[tableName].arrTableColumns[i].Field + '" IN (' + arrType[1] + ');';
|
||||
|
||||
client.query(sql, err => {
|
||||
done();
|
||||
|
||||
if (err) {
|
||||
let msg = '\t--[processEnum] Error while setting ENUM for "' + self._schema + '"."'
|
||||
+ tableName + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...\n' + err;
|
||||
|
||||
generateError(self, msg, sql);
|
||||
resolveProcessEnum();
|
||||
} else {
|
||||
let success = '\t--[processEnum] Set "ENUM" for "' + self._schema + '"."' + tableName
|
||||
+ '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...';
|
||||
|
||||
log(self, success, self._dicTables[tableName].tableLogPath);
|
||||
resolveProcessEnum();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Promise.all(processEnumPromises).then(() => resolve());
|
||||
});
|
||||
});
|
||||
};
|
|
@ -19,21 +19,29 @@
|
|||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
const fs = require('fs');
|
||||
const fmtp = require('./migration/fmtp/FromMySQL2PostgreSQL');
|
||||
const nmig = new fmtp.FromMySQL2PostgreSQL();
|
||||
|
||||
fs.readFile(__dirname + '/config.json', (error, data) => {
|
||||
if (error) {
|
||||
console.log('\n\t--Cannot run migration\nCannot read configuration info from ' + __dirname + '/config.json');
|
||||
} else {
|
||||
try {
|
||||
let config = JSON.parse(data.toString());
|
||||
config.tempDirPath = __dirname + '/temporary_directory';
|
||||
config.logsDirPath = __dirname + '/logs_directory';
|
||||
nmig.run(config);
|
||||
} catch (err) {
|
||||
console.log('\n\t--Cannot parse JSON from' + __dirname + '/config.json');
|
||||
const fs = require('fs');
|
||||
const log = require('./Logger');
|
||||
|
||||
/**
|
||||
* Writes a ditailed error message to the "/errors-only.log" file
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} message
|
||||
* @param {String} sql
|
||||
* @returns {undefined}
|
||||
*/
|
||||
module.exports = function(self, message, sql) {
|
||||
message += '\n\n\tSQL: ' + (sql || '') + '\n\n';
|
||||
let buffer = new Buffer(message, self._encoding);
|
||||
log(self, message, undefined, true);
|
||||
|
||||
fs.open(self._errorLogsPath, 'a', self._0777, (error, fd) => {
|
||||
if (!error) {
|
||||
fs.write(fd, buffer, 0, buffer.length, null, () => {
|
||||
buffer = null;
|
||||
fs.close(fd);
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
159
migration/fmtp/ForeignKeyProcessor.js
Normal file
159
migration/fmtp/ForeignKeyProcessor.js
Normal file
|
@ -0,0 +1,159 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const migrationStateManager = require('./MigrationStateManager');
|
||||
|
||||
/**
 * Creates foreign keys for given table.
 *
 * @param {Conversion} self
 * @param {String} tableName
 * @param {Array} rows
 * @returns {Promise}
 */
function processForeignKeyWorker(self, tableName, rows) {
    return new Promise(resolve => {
        const constraintsPromises = [];
        // Group the per-column rows by constraint name, so a composite foreign
        // key produces a single ALTER TABLE statement carrying all its columns.
        const objConstraints = Object.create(null);

        for (let i = 0; i < rows.length; ++i) {
            const row = rows[i];

            if (row.CONSTRAINT_NAME in objConstraints) {
                objConstraints[row.CONSTRAINT_NAME].column_name.push('"' + row.COLUMN_NAME + '"');
                objConstraints[row.CONSTRAINT_NAME].referenced_column_name.push('"' + row.REFERENCED_COLUMN_NAME + '"');
            } else {
                const constraint = Object.create(null);
                constraint.column_name = ['"' + row.COLUMN_NAME + '"'];
                constraint.referenced_column_name = ['"' + row.REFERENCED_COLUMN_NAME + '"'];
                constraint.referenced_table_name = row.REFERENCED_TABLE_NAME;
                constraint.update_rule = row.UPDATE_RULE;
                constraint.delete_rule = row.DELETE_RULE;
                objConstraints[row.CONSTRAINT_NAME] = constraint;
            }
        }

        rows = null; // Release the (potentially large) result set early.

        for (let attr in objConstraints) {
            constraintsPromises.push(
                new Promise(resolveConstraintPromise => {
                    self._pg.connect((error, client, done) => {
                        if (error) {
                            objConstraints[attr] = null;
                            generateError(self, '\t--[processForeignKeyWorker] Cannot connect to PostgreSQL server...');
                            resolveConstraintPromise();
                        } else {
                            const sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" ADD FOREIGN KEY ('
                                + objConstraints[attr].column_name.join(',') + ') REFERENCES "' + self._schema + '"."'
                                + objConstraints[attr].referenced_table_name + '" (' + objConstraints[attr].referenced_column_name.join(',')
                                + ') ON UPDATE ' + objConstraints[attr].update_rule + ' ON DELETE ' + objConstraints[attr].delete_rule + ';';

                            objConstraints[attr] = null; // The grouped data is no longer needed once the SQL is built.
                            client.query(sql, err => {
                                done();

                                if (err) {
                                    generateError(self, '\t--[processForeignKeyWorker] ' + err, sql);
                                }

                                resolveConstraintPromise();
                            });
                        }
                    });
                })
            );
        }

        Promise.all(constraintsPromises).then(() => resolve());
    });
}
|
||||
|
||||
/**
|
||||
* Starts a process of foreign keys creation.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self) {
|
||||
return migrationStateManager.get(self, 'foreign_keys_loaded').then(isForeignKeysProcessed => {
|
||||
return new Promise(resolve => {
|
||||
let fkPromises = [];
|
||||
|
||||
if (!isForeignKeysProcessed) {
|
||||
for (let i = 0; i < self._tablesToMigrate.length; ++i) {
|
||||
let tableName = self._tablesToMigrate[i];
|
||||
log(self, '\t--[processForeignKey] Search foreign keys for table "' + self._schema + '"."' + tableName + '"...');
|
||||
fkPromises.push(
|
||||
new Promise(fkResolve => {
|
||||
self._mysql.getConnection((error, connection) => {
|
||||
if (error) {
|
||||
// The connection is undefined.
|
||||
generateError(self, '\t--[processForeignKey] Cannot connect to MySQL server...\n' + error);
|
||||
fkResolve();
|
||||
} else {
|
||||
let sql = "SELECT cols.COLUMN_NAME, refs.REFERENCED_TABLE_NAME, refs.REFERENCED_COLUMN_NAME, "
|
||||
+ "cRefs.UPDATE_RULE, cRefs.DELETE_RULE, cRefs.CONSTRAINT_NAME "
|
||||
+ "FROM INFORMATION_SCHEMA.`COLUMNS` AS cols "
|
||||
+ "INNER JOIN INFORMATION_SCHEMA.`KEY_COLUMN_USAGE` AS refs "
|
||||
+ "ON refs.TABLE_SCHEMA = cols.TABLE_SCHEMA "
|
||||
+ "AND refs.REFERENCED_TABLE_SCHEMA = cols.TABLE_SCHEMA "
|
||||
+ "AND refs.TABLE_NAME = cols.TABLE_NAME "
|
||||
+ "AND refs.COLUMN_NAME = cols.COLUMN_NAME "
|
||||
+ "LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS cRefs "
|
||||
+ "ON cRefs.CONSTRAINT_SCHEMA = cols.TABLE_SCHEMA "
|
||||
+ "AND cRefs.CONSTRAINT_NAME = refs.CONSTRAINT_NAME "
|
||||
+ "LEFT JOIN INFORMATION_SCHEMA.`KEY_COLUMN_USAGE` AS links "
|
||||
+ "ON links.TABLE_SCHEMA = cols.TABLE_SCHEMA "
|
||||
+ "AND links.REFERENCED_TABLE_SCHEMA = cols.TABLE_SCHEMA "
|
||||
+ "AND links.REFERENCED_TABLE_NAME = cols.TABLE_NAME "
|
||||
+ "AND links.REFERENCED_COLUMN_NAME = cols.COLUMN_NAME "
|
||||
+ "LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS cLinks "
|
||||
+ "ON cLinks.CONSTRAINT_SCHEMA = cols.TABLE_SCHEMA "
|
||||
+ "AND cLinks.CONSTRAINT_NAME = links.CONSTRAINT_NAME "
|
||||
+ "WHERE cols.TABLE_SCHEMA = '" + self._mySqlDbName + "' "
|
||||
+ "AND cols.TABLE_NAME = '" + tableName + "';";
|
||||
|
||||
connection.query(sql, (err, rows) => {
|
||||
connection.release();
|
||||
|
||||
if (err) {
|
||||
generateError(self, self, '\t--[processForeignKey] ' + err, sql);
|
||||
fkResolve();
|
||||
} else {
|
||||
processForeignKeyWorker(self, tableName, rows).then(() => {
|
||||
log(self, '\t--[processForeignKey] Foreign keys for table "' + self._schema + '"."' + tableName + '" are set...');
|
||||
fkResolve();
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Promise.all(fkPromises).then(() => resolve());
|
||||
});
|
||||
});
|
||||
};
|
119
migration/fmtp/IndexAndKeyProcessor.js
Normal file
119
migration/fmtp/IndexAndKeyProcessor.js
Normal file
|
@ -0,0 +1,119 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Create primary key and indices.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self, tableName) {
|
||||
return connect(self).then(() => {
|
||||
return new Promise(resolveProcessIndexAndKey => {
|
||||
self._mysql.getConnection((error, connection) => {
|
||||
if (error) {
|
||||
// The connection is undefined.
|
||||
generateError(self, '\t--[processIndexAndKey] Cannot connect to MySQL server...\n\t' + error);
|
||||
resolveProcessIndexAndKey();
|
||||
} else {
|
||||
let sql = 'SHOW INDEX FROM `' + tableName + '`;';
|
||||
connection.query(sql, (err, arrIndices) => {
|
||||
connection.release();
|
||||
|
||||
if (err) {
|
||||
generateError(self, '\t--[processIndexAndKey] ' + err, sql);
|
||||
resolveProcessIndexAndKey();
|
||||
} else {
|
||||
let objPgIndices = Object.create(null);
|
||||
let cnt = 0;
|
||||
let indexType = '';
|
||||
let processIndexAndKeyPromises = [];
|
||||
|
||||
for (let i = 0; i < arrIndices.length; ++i) {
|
||||
if (arrIndices[i].Key_name in objPgIndices) {
|
||||
objPgIndices[arrIndices[i].Key_name].column_name.push('"' + arrIndices[i].Column_name + '"');
|
||||
} else {
|
||||
objPgIndices[arrIndices[i].Key_name] = {
|
||||
is_unique : arrIndices[i].Non_unique === 0 ? true : false,
|
||||
column_name : ['"' + arrIndices[i].Column_name + '"'],
|
||||
Index_type : ' USING ' + (arrIndices[i].Index_type === 'SPATIAL' ? 'GIST' : arrIndices[i].Index_type)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
for (let attr in objPgIndices) {
|
||||
processIndexAndKeyPromises.push(
|
||||
new Promise(resolveProcessIndexAndKeySql => {
|
||||
self._pg.connect((pgError, pgClient, done) => {
|
||||
if (pgError) {
|
||||
let msg = '\t--[processIndexAndKey] Cannot connect to PostgreSQL server...\n' + pgError;
|
||||
generateError(self, msg);
|
||||
resolveProcessIndexAndKeySql();
|
||||
} else {
|
||||
if (attr.toLowerCase() === 'primary') {
|
||||
indexType = 'PK';
|
||||
sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" '
|
||||
+ 'ADD PRIMARY KEY(' + objPgIndices[attr].column_name.join(',') + ');';
|
||||
|
||||
} else {
|
||||
// "schema_idxname_{integer}_idx" - is NOT a mistake.
|
||||
let columnName = objPgIndices[attr].column_name[0].slice(1, -1) + cnt++;
|
||||
indexType = 'index';
|
||||
sql = 'CREATE ' + (objPgIndices[attr].is_unique ? 'UNIQUE ' : '') + 'INDEX "'
|
||||
+ self._schema + '_' + tableName + '_' + columnName + '_idx" ON "'
|
||||
+ self._schema + '"."' + tableName + '" '
|
||||
+ objPgIndices[attr].Index_type + ' (' + objPgIndices[attr].column_name.join(',') + ');';
|
||||
}
|
||||
|
||||
pgClient.query(sql, err2 => {
|
||||
done();
|
||||
|
||||
if (err2) {
|
||||
generateError(self, '\t--[processIndexAndKey] ' + err2, sql);
|
||||
resolveProcessIndexAndKeySql();
|
||||
} else {
|
||||
resolveProcessIndexAndKeySql();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
Promise.all(processIndexAndKeyPromises).then(() => {
|
||||
let success = '\t--[processIndexAndKey] "' + self._schema + '"."' + tableName + '": PK/indices are successfully set...';
|
||||
log(self, success, self._dicTables[tableName].tableLogPath);
|
||||
resolveProcessIndexAndKey();
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
31
migration/fmtp/IntegerValidator.js
Normal file
31
migration/fmtp/IntegerValidator.js
Normal file
|
@ -0,0 +1,31 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
 * This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
 * the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
 * This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
 * If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Checks if given value is integer number.
|
||||
*
|
||||
* @param {String|Number} value
|
||||
* @returns {Boolean}
|
||||
*/
|
||||
module.exports = function(value) {
|
||||
return !isNaN(parseInt(value)) && isFinite(value);
|
||||
};
|
61
migration/fmtp/Logger.js
Normal file
61
migration/fmtp/Logger.js
Normal file
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
/**
|
||||
* Outputs given log.
|
||||
* Writes given log to the "/all.log" file.
|
||||
* If necessary, writes given log to the "/{tableName}.log" file.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} log
|
||||
* @param {String} tableLogPath
|
||||
* @param {Boolean} isErrorLog
|
||||
* @returns {undefined}
|
||||
*/
|
||||
module.exports = function(self, log, tableLogPath, isErrorLog) {
|
||||
let buffer = new Buffer(log + '\n\n', self._encoding);
|
||||
|
||||
if (!isErrorLog) {
|
||||
console.log(log);
|
||||
}
|
||||
|
||||
fs.open(self._allLogsPath, 'a', self._0777, (error, fd) => {
|
||||
if (!error) {
|
||||
fs.write(fd, buffer, 0, buffer.length, null, () => {
|
||||
fs.close(fd, () => {
|
||||
if (tableLogPath) {
|
||||
fs.open(tableLogPath, 'a', self._0777, (error, fd) => {
|
||||
if (!error) {
|
||||
fs.write(fd, buffer, 0, buffer.length, null, () => {
|
||||
buffer = null;
|
||||
fs.close(fd);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
239
migration/fmtp/Main.js
Normal file
239
migration/fmtp/Main.js
Normal file
|
@ -0,0 +1,239 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const childProcess = require('child_process');
|
||||
const processViews = require('./ViewGenerator');
|
||||
const readDataTypesMap = require('./DataTypesMapReader');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const MessageToDataLoader = require('./MessageToDataLoader');
|
||||
const Conversion = require('./Conversion');
|
||||
const generateReport = require('./ReportGenerator');
|
||||
const processComments = require('./CommentsProcessor');
|
||||
const migrationStateManager = require('./MigrationStateManager');
|
||||
const processIndexAndKey = require('./IndexAndKeyProcessor');
|
||||
const processForeignKey = require('./ForeignKeyProcessor');
|
||||
const createSequence = require('./SequencesProcessor');
|
||||
const runVacuumFullAndAnalyze = require('./VacuumProcessor');
|
||||
const processEnum = require('./EnumProcessor');
|
||||
const processNull = require('./NullProcessor');
|
||||
const processDefault = require('./DefaultProcessor');
|
||||
const createSchema = require('./SchemaProcessor');
|
||||
const cleanup = require('./CleanupProcessor');
|
||||
const dataPoolManager = require('./DataPoolManager');
|
||||
const directoriesManager = require('./DirectoriesManager');
|
||||
const loadStructureToMigrate = require('./StructureLoader');
|
||||
|
||||
// Shared Conversion instance for this module; assigned once when the migration is started.
let self = null;

// Count of data-units already dispatched to DataLoader child processes.
let intProcessedDataUnits = 0;
|
||||
|
||||
/**
 * Kill a process specified by the pid.
 *
 * @param {Number} pid
 * @returns {undefined}
 */
function killProcess(pid) {
    try {
        process.kill(pid);
    } catch (err) {
        // The target process may have already exited; record the failure and move on.
        generateError(self, '\t--[killProcess] ' + err);
    }
}
|
||||
|
||||
/**
 * Instructs DataLoader which DataUnits should be loaded.
 * No need to check the state-log.
 * If dataPool's length is zero, then nmig will proceed to the next step.
 *
 * @returns {undefined}
 */
function pipeData() {
    if (self._dataPool.length === 0) {
        return continueProcessAfterDataLoading();
    }

    const strDataLoaderPath = __dirname + '/DataLoader.js';
    const options = self._loaderMaxOldSpaceSize === 'DEFAULT'
        ? {}
        : { execArgv: ['--max-old-space-size=' + self._loaderMaxOldSpaceSize] };

    const loaderProcess = childProcess.fork(strDataLoaderPath, options);

    loaderProcess.on('message', signal => {
        if (typeof signal === 'object') {
            // Progress report from the loader: accumulate and print row counts.
            self._dicTables[signal.tableName].totalRowsInserted += signal.rowsInserted;
            let msg = '\t--[pipeData] For now inserted: ' + self._dicTables[signal.tableName].totalRowsInserted + ' rows, '
                    + 'Total rows to insert into "' + self._schema + '"."' + signal.tableName + '": ' + signal.totalRowsToInsert;

            log(self, msg);
        } else {
            // Non-object signal means the loader finished its batch:
            // terminate it and either pipe the next batch or move on.
            killProcess(loaderProcess.pid);
            intProcessedDataUnits += self._pipeWidth;
            return intProcessedDataUnits < self._dataPool.length ? pipeData() : continueProcessAfterDataLoading();
        }
    });

    let intEnd = self._dataPool.length - (self._dataPool.length - self._pipeWidth - intProcessedDataUnits);
    let message = new MessageToDataLoader(self._config, self._dataPool.slice(intProcessedDataUnits, intEnd));
    loaderProcess.send(message);
}
|
||||
|
||||
/**
 * Continues migration process after data loading.
 *
 * @returns {undefined}
 */
function continueProcessAfterDataLoading() {
    if (self._migrateOnlyData) {
        // Data-only mode: skip constraints/views and finish up.
        dataPoolManager.dropDataPoolTable(self)
            .then(() => runVacuumFullAndAnalyze(self))
            .then(() => migrationStateManager.dropStateLogsTable(self))
            .then(() => cleanup(self))
            .then(() => generateReport(self, 'NMIG migration is accomplished.'));

        return;
    }

    migrationStateManager.get(self, 'per_table_constraints_loaded').then(isTableConstraintsLoaded => {
        const arrPromises = [];

        if (!isTableConstraintsLoaded) {
            for (let i = 0; i < self._tablesToMigrate.length; ++i) {
                const tableName = self._tablesToMigrate[i];
                // Per-table constraints are applied strictly in sequence for
                // each table; the tables themselves proceed in parallel.
                arrPromises.push(
                    processEnum(self, tableName)
                        .then(() => processNull(self, tableName))
                        .then(() => processDefault(self, tableName))
                        .then(() => createSequence(self, tableName))
                        .then(() => processIndexAndKey(self, tableName))
                        .then(() => processComments(self, tableName))
                );
            }
        }

        Promise.all(arrPromises).then(() => {
            migrationStateManager.set(self, 'per_table_constraints_loaded')
                .then(() => processForeignKey(self))
                .then(() => migrationStateManager.set(self, 'foreign_keys_loaded'))
                .then(() => dataPoolManager.dropDataPoolTable(self))
                .then(() => processViews(self))
                .then(() => migrationStateManager.set(self, 'views_loaded'))
                .then(() => runVacuumFullAndAnalyze(self))
                .then(() => migrationStateManager.dropStateLogsTable(self))
                .then(() => cleanup(self))
                .then(() => generateReport(self, 'NMIG migration is accomplished.'));
        });
    });
}
|
||||
|
||||
/**
 * Runs migration according to user's configuration.
 * Boot sequence: read data-type map, create log/temp directories, create the
 * target schema and bookkeeping tables, load the source structure, then start
 * piping data.
 *
 * NOTE on the chain shape: each `.then(onFulfilled, onRejected)` step's
 * rejection handler reports the failure of the PREVIOUS step — that is why
 * each error message refers to the stage one position earlier in the chain.
 *
 * @param {Object} config
 * @returns {undefined}
 */
module.exports = function(config) {
    console.log('\n\tNMIG - the database migration tool\n\tCopyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>\n\t Boot...');
    // Module-level state holder; shared by every helper in this file.
    self = new Conversion(config);

    readDataTypesMap(self).then(
        () => {
            return directoriesManager.createLogsDirectory(self);
        },
        () => {
            // Braces are essential. Without them promises-chain will continue execution.
            // (An arrow body without braces would return a value and keep the chain fulfilled.)
            console.log('\t--[Main] Failed to boot migration');
        }
    ).then(
        () => {
            return directoriesManager.createTemporaryDirectory(self);
        },
        () => {
            // Braces are essential. Without them promises-chain will continue execution.
            log(self, '\t--[Main] Logs directory was not created...');
        }
    ).then(
        () => {
            return createSchema(self);
        },
        () => {
            // Handles createTemporaryDirectory() rejection.
            let msg = '\t--[Main] The temporary directory [' + self._tempDirPath + '] already exists...'
                    + '\n\t  Please, remove this directory and rerun NMIG...';

            log(self, msg);
        }
    ).then(
        () => {
            return migrationStateManager.createStateLogsTable(self);
        },
        () => {
            // Handles createSchema() rejection.
            generateError(self, '\t--[Main] Cannot create new DB schema...');
            return cleanup(self);
        }
    ).then(
        () => {
            return dataPoolManager.createDataPoolTable(self);
        },
        () => {
            // Handles createStateLogsTable() rejection.
            generateError(self, '\t--[Main] Cannot create execution_logs table...');
            return cleanup(self);
        }
    ).then(
        () => {
            return loadStructureToMigrate(self);
        },
        () => {
            // Handles createDataPoolTable() rejection.
            generateError(self, '\t--[Main] Cannot create data-pool...');
            return cleanup(self);
        }
    ).then(
        () => {
            return dataPoolManager.readDataPool(self);
        },
        () => {
            // Handles loadStructureToMigrate() rejection.
            generateError(self, '\t--[Main] NMIG cannot load source database structure...');
            return cleanup(self);
        }
    ).then(
        pipeData,
        () => {
            // Handles readDataPool() rejection.
            generateError(self, '\t--[Main] NMIG failed to load Data-Units pool...');
            return cleanup(self);
        }
    );
};
|
34
migration/fmtp/MessageToDataLoader.js
Normal file
34
migration/fmtp/MessageToDataLoader.js
Normal file
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Representation of a message of the master process to DataLoader process.
|
||||
* Contents migration's configuration and an array of "data-chunks".
|
||||
* Constructor.
|
||||
*
|
||||
* @param {Object} config
|
||||
* @param {Array} chunks
|
||||
*/
|
||||
module.exports = function MessageToDataLoader(config, chunks) {
|
||||
this.config = config;
|
||||
this.chunks = chunks;
|
||||
};
|
36
migration/fmtp/MessageToMaster.js
Normal file
36
migration/fmtp/MessageToMaster.js
Normal file
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Representation of a message of DataLoader process to the master process regarding records,
|
||||
* inserted to specified table.
|
||||
* Constructor.
|
||||
*
|
||||
* @param {String} tableName
|
||||
* @param {Number} rowsInserted
|
||||
* @param {Number} totalRowsToInsert
|
||||
*/
|
||||
module.exports = function MessageToMaster(tableName, rowsInserted, totalRowsToInsert) {
|
||||
this.tableName = tableName;
|
||||
this.rowsInserted = rowsInserted;
|
||||
this.totalRowsToInsert = totalRowsToInsert;
|
||||
};
|
191
migration/fmtp/MigrationStateManager.js
Normal file
191
migration/fmtp/MigrationStateManager.js
Normal file
|
@ -0,0 +1,191 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
 * Get state-log.
 * Reads one boolean flag column from the per-migration "state_logs_*" table.
 *
 * Contract: the returned promise never rejects — any connection or query
 * failure is logged via generateError() and resolves to `false`, which the
 * callers treat as "this migration phase has not run yet".
 *
 * @param {Conversion} self
 * @param {String} param - column name of the flag to read; assumed to be one
 *                         of the internal, trusted flag names (it is
 *                         concatenated directly into the SQL statement).
 * @returns {Promise} resolves to the flag's value, or `false` on any error
 */
module.exports.get = function(self, param) {
    return connect(self).then(() => {
        return new Promise(resolve => {
            self._pg.connect((error, client, done) => {
                if (error) {
                    generateError(self, '\t--[MigrationStateManager.get] Cannot connect to PostgreSQL server...\n' + error);
                    resolve(false);
                } else {
                    let sql = 'SELECT ' + param + ' FROM "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName + '";';

                    client.query(sql, (err, data) => {
                        // Release the pooled client before handling the result.
                        done();

                        if (err) {
                            generateError(self, '\t--[MigrationStateManager.get] ' + err, sql);
                            resolve(false);
                        } else {
                            // The state-logs table holds exactly one row (see createStateLogsTable).
                            resolve(data.rows[0][param]);
                        }
                    });
                }
            });
        });
    });
};
|
||||
|
||||
/**
 * Update the state-log.
 * Sets one boolean flag column of the per-migration "state_logs_*" table to TRUE,
 * marking the corresponding migration phase as completed.
 *
 * Contract: best-effort — the returned promise always resolves (with no value);
 * connection or query failures are only logged via generateError().
 *
 * @param {Conversion} self
 * @param {String} param - column name of the flag to set; assumed to be one of
 *                         the internal, trusted flag names (it is concatenated
 *                         directly into the SQL statement).
 * @returns {Promise}
 */
module.exports.set = function(self, param) {
    return connect(self).then(() => {
        return new Promise(resolve => {
            self._pg.connect((error, client, done) => {
                if (error) {
                    generateError(self, '\t--[MigrationStateManager.set] Cannot connect to PostgreSQL server...\n' + error);
                    resolve();
                } else {
                    // No WHERE clause needed: the state-logs table holds exactly one row.
                    let sql = 'UPDATE "' + self._schema + '"."state_logs_'
                            + self._schema + self._mySqlDbName + '" SET ' + param + ' = TRUE;';

                    client.query(sql, err => {
                        // Release the pooled client before handling the result.
                        done();

                        if (err) {
                            generateError(self, '\t--[MigrationStateManager.set] ' + err, sql);
                        }

                        resolve();
                    });
                }
            });
        });
    });
};
|
||||
|
||||
/**
 * Create the "{schema}"."state_logs_{self._schema + self._mySqlDbName}" bookkeeping table.
 * The table holds exactly one row of boolean flags, one per migration phase,
 * allowing an interrupted migration to be resumed. If the table (and its row)
 * already exist from a previous run, they are kept untouched.
 *
 * @param {Conversion} self
 * @returns {Promise} rejects (with no reason) on any connection/query failure
 */
module.exports.createStateLogsTable = function(self) {
    return connect(self).then(() => {
        return new Promise((resolve, reject) => {
            self._pg.connect((error, client, done) => {
                if (error) {
                    generateError(self, '\t--[createStateLogsTable] Cannot connect to PostgreSQL server...\n' + error);
                    reject();
                } else {
                    let sql = 'CREATE TABLE IF NOT EXISTS "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName
                            + '"('
                            + '"tables_loaded" BOOLEAN,'
                            + '"per_table_constraints_loaded" BOOLEAN,'
                            + '"foreign_keys_loaded" BOOLEAN,'
                            + '"views_loaded" BOOLEAN'
                            + ');';

                    client.query(sql, err => {
                        if (err) {
                            done();
                            generateError(self, '\t--[createStateLogsTable] ' + err, sql);
                            reject();
                        } else {
                            // The table may have existed already (IF NOT EXISTS);
                            // only seed the single flags row when it is empty.
                            sql = 'SELECT COUNT(1) AS cnt FROM "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName + '";';
                            client.query(sql, (errorCount, result) => {
                                if (errorCount) {
                                    done();
                                    generateError(self, '\t--[createStateLogsTable] ' + errorCount, sql);
                                    reject();
                                } else if (+result.rows[0].cnt === 0) {
                                    sql = 'INSERT INTO "' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName
                                        + '" VALUES(FALSE, FALSE, FALSE, FALSE);';

                                    client.query(sql, errorInsert => {
                                        done();

                                        if (errorInsert) {
                                            generateError(self, '\t--[createStateLogsTable] ' + errorInsert, sql);
                                            reject();
                                        } else {
                                            let msg = '\t--[createStateLogsTable] table "' + self._schema + '"."state_logs_'
                                                    + self._schema + self._mySqlDbName + '" is created...';

                                            log(self, msg);
                                            resolve();
                                        }
                                    });
                                } else {
                                    // Resumed run: the table and its flags row survived a previous
                                    // execution. Release the pooled client (this was missing before,
                                    // leaking a connection) and report the actual situation instead
                                    // of claiming the table was just created.
                                    done();
                                    let msg = '\t--[createStateLogsTable] table "' + self._schema + '"."state_logs_'
                                            + self._schema + self._mySqlDbName + '" already exists...';

                                    log(self, msg);
                                    resolve();
                                }
                            });
                        }
                    });
                }
            });
        });
    });
};
|
||||
|
||||
/**
 * Drop the "{schema}"."state_logs_{self._schema + self._mySqlDbName}" bookkeeping table.
 * Best-effort: the returned promise always resolves; failures are only logged.
 *
 * @param {Conversion} self
 * @returns {Promise}
 */
module.exports.dropStateLogsTable = function(self) {
    return connect(self).then(() => new Promise(resolve => {
        self._pg.connect((connectionError, client, release) => {
            if (connectionError) {
                generateError(self, '\t--[dropStateLogsTable] Cannot connect to PostgreSQL server...\n' + connectionError);
                resolve();
                return;
            }

            const tableRef = '"' + self._schema + '"."state_logs_' + self._schema + self._mySqlDbName + '"';
            const sql = 'DROP TABLE ' + tableRef + ';';

            client.query(sql, queryError => {
                // Return the pooled client before reporting the outcome.
                release();

                if (queryError) {
                    generateError(self, '\t--[dropStateLogsTable] ' + queryError, sql);
                } else {
                    log(self, '\t--[dropStateLogsTable] table ' + tableRef + ' is dropped...');
                }

                resolve();
            });
        });
    }));
};
|
81
migration/fmtp/NullProcessor.js
Normal file
81
migration/fmtp/NullProcessor.js
Normal file
|
@ -0,0 +1,81 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Define which columns of the given table can contain the "NULL" value.
|
||||
* Set an appropriate constraint, if need.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} tableName
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self, tableName) {
|
||||
return connect(self).then(() => {
|
||||
return new Promise(resolve => {
|
||||
log(self, '\t--[processNull] Defines "NOT NULLs" for table: "' + self._schema + '"."' + tableName + '"', self._dicTables[tableName].tableLogPath);
|
||||
let processNullPromises = [];
|
||||
|
||||
for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
|
||||
if (self._dicTables[tableName].arrTableColumns[i].Null.toLowerCase() === 'no') {
|
||||
processNullPromises.push(
|
||||
new Promise(resolveProcessNull => {
|
||||
self._pg.connect((error, client, done) => {
|
||||
if (error) {
|
||||
let msg = '\t--[processNull] Cannot connect to PostgreSQL server...\n' + error;
|
||||
generateError(self, msg);
|
||||
resolveProcessNull();
|
||||
} else {
|
||||
let sql = 'ALTER TABLE "' + self._schema + '"."' + tableName
|
||||
+ '" ALTER COLUMN "' + self._dicTables[tableName].arrTableColumns[i].Field + '" SET NOT NULL;';
|
||||
|
||||
client.query(sql, err => {
|
||||
done();
|
||||
|
||||
if (err) {
|
||||
let msg = '\t--[processNull] Error while setting NOT NULL for "' + self._schema + '"."'
|
||||
+ tableName + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...\n' + err;
|
||||
|
||||
generateError(self, msg, sql);
|
||||
resolveProcessNull();
|
||||
} else {
|
||||
let success = '\t--[processNull] Set NOT NULL for "' + self._schema + '"."' + tableName
|
||||
+ '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...';
|
||||
|
||||
log(self, success, self._dicTables[tableName].tableLogPath);
|
||||
resolveProcessNull();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Promise.all(processNullPromises).then(() => resolve());
|
||||
});
|
||||
});
|
||||
};
|
47
migration/fmtp/ReportGenerator.js
Normal file
47
migration/fmtp/ReportGenerator.js
Normal file
|
@ -0,0 +1,47 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const log = require('./Logger');
|
||||
|
||||
/**
|
||||
* Generates a summary report.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @param {String} endMsg
|
||||
* @returns {undefined}
|
||||
*/
|
||||
module.exports = function(self, endMsg) {
|
||||
let differenceSec = ((new Date()) - self._timeBegin) / 1000;
|
||||
let seconds = Math.floor(differenceSec % 60);
|
||||
differenceSec = differenceSec / 60;
|
||||
let minutes = Math.floor(differenceSec % 60);
|
||||
let hours = Math.floor(differenceSec / 60);
|
||||
hours = hours < 10 ? '0' + hours : hours;
|
||||
minutes = minutes < 10 ? '0' + minutes : minutes;
|
||||
seconds = seconds < 10 ? '0' + seconds : seconds;
|
||||
let output = '\t--[generateReport] ' + endMsg
|
||||
+ '\n\t--[generateReport] Total time: ' + hours + ':' + minutes + ':' + seconds
|
||||
+ '\n\t--[generateReport] (hours:minutes:seconds)';
|
||||
|
||||
log(self, output);
|
||||
process.exit();
|
||||
};
|
67
migration/fmtp/SchemaProcessor.js
Normal file
67
migration/fmtp/SchemaProcessor.js
Normal file
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Create a new database schema.
|
||||
* Insure a uniqueness of a new schema name.
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self) {
|
||||
return connect(self).then(() => {
|
||||
return new Promise((resolve, reject) => {
|
||||
self._pg.connect((error, client, done) => {
|
||||
if (error) {
|
||||
generateError(self, '\t--[createSchema] Cannot connect to PostgreSQL server...\n' + error);
|
||||
reject();
|
||||
} else {
|
||||
let sql = "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '" + self._schema + "';";
|
||||
client.query(sql, (err, result) => {
|
||||
if (err) {
|
||||
done();
|
||||
generateError(self, '\t--[createSchema] ' + err, sql);
|
||||
reject();
|
||||
} else if (result.rows.length === 0) {
|
||||
sql = 'CREATE SCHEMA "' + self._schema + '";';
|
||||
client.query(sql, err => {
|
||||
done();
|
||||
|
||||
if (err) {
|
||||
generateError(self, '\t--[createSchema] ' + err, sql);
|
||||
reject();
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
124
migration/fmtp/SequencesProcessor.js
Normal file
124
migration/fmtp/SequencesProcessor.js
Normal file
|
@ -0,0 +1,124 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
 * Define which column in given table has the "auto_increment" attribute and
 * create an appropriate PostgreSQL sequence for each such column.
 *
 * For every auto_increment column, four statements run in order on one pooled
 * client: CREATE SEQUENCE, ALTER TABLE ... SET DEFAULT NEXTVAL, ALTER SEQUENCE
 * ... OWNED BY, and SELECT SETVAL(..., MAX(column)) to continue numbering from
 * the migrated data. Best-effort: each failure is logged and its promise still
 * resolves, so the returned promise never rejects.
 *
 * @param {Conversion} self
 * @param {String} tableName
 * @returns {Promise}
 */
module.exports = function(self, tableName) {
    return connect(self).then(() => {
        return new Promise(resolve => {
            let createSequencePromises = [];

            for (let i = 0; i < self._dicTables[tableName].arrTableColumns.length; ++i) {
                // MySQL marks auto-increment columns via the "Extra" metadata field.
                if (self._dicTables[tableName].arrTableColumns[i].Extra === 'auto_increment') {
                    createSequencePromises.push(
                        new Promise(resolveCreateSequence => {
                            // Sequence named after PostgreSQL's serial convention: <table>_<column>_seq.
                            let seqName = tableName + '_' + self._dicTables[tableName].arrTableColumns[i].Field + '_seq';
                            log(self, '\t--[createSequence] Trying to create sequence : "' + self._schema + '"."' + seqName + '"', self._dicTables[tableName].tableLogPath);
                            self._pg.connect((error, client, done) => {
                                if (error) {
                                    let msg = '\t--[createSequence] Cannot connect to PostgreSQL server...\n' + error;
                                    generateError(self, msg);
                                    resolveCreateSequence();
                                } else {
                                    // Step 1 of 4: create the sequence itself.
                                    let sql = 'CREATE SEQUENCE "' + self._schema + '"."' + seqName + '";';
                                    client.query(sql, err => {
                                        if (err) {
                                            // Release the client on the error path; success paths
                                            // keep it until the final query's callback.
                                            done();
                                            let errMsg = '\t--[createSequence] Failed to create sequence "' + self._schema + '"."' + seqName + '"';
                                            generateError(self, errMsg, sql);
                                            resolveCreateSequence();
                                        } else {
                                            // Step 2 of 4: make the column default to the sequence's NEXTVAL.
                                            sql = 'ALTER TABLE "' + self._schema + '"."' + tableName + '" '
                                                + 'ALTER COLUMN "' + self._dicTables[tableName].arrTableColumns[i].Field + '" '
                                                + 'SET DEFAULT NEXTVAL(\'"' + self._schema + '"."' + seqName + '"\');';

                                            client.query(sql, err2 => {
                                                if (err2) {
                                                    done();
                                                    let err2Msg = '\t--[createSequence] Failed to set default value for "' + self._schema + '"."'
                                                                + tableName + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...'
                                                                + '\n\t--[createSequence] Note: sequence "' + self._schema + '"."' + seqName + '" was created...';

                                                    generateError(self, err2Msg, sql);
                                                    resolveCreateSequence();
                                                } else {
                                                    // Step 3 of 4: tie the sequence's lifetime to the column
                                                    // (dropping the column drops the sequence).
                                                    sql = 'ALTER SEQUENCE "' + self._schema + '"."' + seqName + '" '
                                                        + 'OWNED BY "' + self._schema + '"."' + tableName
                                                        + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '";';

                                                    client.query(sql, err3 => {
                                                        if (err3) {
                                                            done();
                                                            let err3Msg = '\t--[createSequence] Failed to relate sequence "' + self._schema + '"."' + seqName + '" to '
                                                                        + '"' + self._schema + '"."'
                                                                        + tableName + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '"...';

                                                            generateError(self, err3Msg, sql);
                                                            resolveCreateSequence();
                                                        } else {
                                                            // Step 4 of 4: fast-forward the sequence past the
                                                            // already-migrated rows' maximum value.
                                                            sql = 'SELECT SETVAL(\'"' + self._schema + '"."' + seqName + '"\', '
                                                                + '(SELECT MAX("' + self._dicTables[tableName].arrTableColumns[i].Field + '") FROM "'
                                                                + self._schema + '"."' + tableName + '"));';

                                                            client.query(sql, err4 => {
                                                                // Last query: release the client either way.
                                                                done();

                                                                if (err4) {
                                                                    let err4Msg = '\t--[createSequence] Failed to set max-value of "' + self._schema + '"."'
                                                                                + tableName + '"."' + self._dicTables[tableName].arrTableColumns[i].Field + '" '
                                                                                + 'as the "NEXTVAL of "' + self._schema + '"."' + seqName + '"...';

                                                                    generateError(self, err4Msg, sql);
                                                                    resolveCreateSequence();
                                                                } else {
                                                                    let success = '\t--[createSequence] Sequence "' + self._schema + '"."' + seqName + '" is created...';
                                                                    log(self, success, self._dicTables[tableName].tableLogPath);
                                                                    resolveCreateSequence();
                                                                }
                                                            });
                                                        }
                                                    });
                                                }
                                            });
                                        }
                                    });
                                }
                            });
                        })
                    );
                }
            }

            Promise.all(createSequencePromises).then(() => resolve());
        });
    });
};
|
111
migration/fmtp/StructureLoader.js
Normal file
111
migration/fmtp/StructureLoader.js
Normal file
|
@ -0,0 +1,111 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const Table = require('./Table');
|
||||
const createTable = require('./TableProcessor');
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const prepareDataChunks = require('./DataChunksProcessor');
|
||||
const migrationStateManager = require('./MigrationStateManager');
|
||||
|
||||
/**
 * Processes current table before data loading: creates the target table and
 * prepares its data chunks. Failures are logged and swallowed, so the returned
 * promise always resolves.
 *
 * @param {Conversion} self
 * @param {String} tableName
 * @param {Boolean} stateLog
 * @returns {Promise}
 */
function processTableBeforeDataLoading(self, tableName, stateLog) {
    return connect(self)
        .then(() => createTable(self, tableName))
        .then(() => prepareDataChunks(self, tableName, stateLog))
        .catch(() => {
            generateError(self, '\t--[processTableBeforeDataLoading] Cannot create table "' + self._schema + '"."' + tableName + '"...');
        });
}
|
||||
|
||||
/**
 * Load source tables and views, that need to be migrated.
 * Lists all relations of the source MySQL database, registers base tables
 * (except excluded ones) and views on the shared Conversion object, and kicks
 * off per-table pre-loading (table creation + data-chunk preparation).
 *
 * @param {Conversion} self
 * @returns {Promise} rejects (with no reason) on MySQL connection/query failure
 *                    or when any table's pre-loading fails
 */
module.exports = function(self) {
    // haveTablesLoaded: flag from the state-logs table; true on a resumed run.
    return migrationStateManager.get(self, 'tables_loaded').then(haveTablesLoaded => {
        return new Promise((resolve, reject) => {
            self._mysql.getConnection((error, connection) => {
                if (error) {
                    // The connection is undefined.
                    generateError(self, '\t--[loadStructureToMigrate] Cannot connect to MySQL server...\n' + error);
                    reject();
                } else {
                    // Returns one row per relation, with a Table_type column
                    // distinguishing 'BASE TABLE' from 'VIEW'.
                    let sql = 'SHOW FULL TABLES IN `' + self._mySqlDbName + '`;';
                    connection.query(sql, (strErr, rows) => {
                        connection.release();

                        if (strErr) {
                            generateError(self, '\t--[loadStructureToMigrate] ' + strErr, sql);
                            reject();
                        } else {
                            let tablesCnt = 0;
                            let viewsCnt = 0;
                            let processTablePromises = [];

                            for (let i = 0; i < rows.length; ++i) {
                                // MySQL names the relation column after the database:
                                // "Tables_in_<db>".
                                let relationName = rows[i]['Tables_in_' + self._mySqlDbName];

                                if (rows[i].Table_type === 'BASE TABLE' && self._excludeTables.indexOf(relationName) === -1) {
                                    self._tablesToMigrate.push(relationName);
                                    // Each table gets its own per-table log file.
                                    self._dicTables[relationName] = new Table(self._logsDirPath + '/' + relationName + '.log');
                                    processTablePromises.push(processTableBeforeDataLoading(self, relationName, haveTablesLoaded));
                                    tablesCnt++;
                                } else if (rows[i].Table_type === 'VIEW') {
                                    self._viewsToMigrate.push(relationName);
                                    viewsCnt++;
                                }
                            }

                            // Help GC: the result set is no longer needed.
                            rows = null;
                            self._tablesCnt = tablesCnt;
                            self._viewsCnt = viewsCnt;
                            let message = '\t--[loadStructureToMigrate] Source DB structure is loaded...\n'
                                        + '\t--[loadStructureToMigrate] Tables to migrate: ' + tablesCnt + '\n'
                                        + '\t--[loadStructureToMigrate] Views to migrate: ' + viewsCnt;

                            log(self, message);

                            Promise.all(processTablePromises).then(
                                () => {
                                    // Mark the phase complete only after every table was prepared.
                                    migrationStateManager.set(self, 'tables_loaded').then(() => resolve());
                                },
                                () => reject()
                            );
                        }
                    });
                }
            });
        });
    });
};
|
147
migration/fmtp/TableProcessor.js
Normal file
147
migration/fmtp/TableProcessor.js
Normal file
|
@ -0,0 +1,147 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
 * Converts MySQL data types to corresponding PostgreSQL data types.
 * This conversion performs in accordance to mapping rules in './DataTypesMap.json'.
 * './DataTypesMap.json' can be customized.
 *
 * @param {Object} objDataTypesMap - mapping: mysql type name -> { type, increased_size, mySqlVarLenPgSqlFixedLen }
 * @param {String} mySqlDataType   - raw MySQL type, e.g. "int(11) unsigned" or "varchar(255)"
 * @returns {String} the PostgreSQL type definition
 */
function mapDataTypes(objDataTypesMap, mySqlDataType) {
    let retVal = '';
    let arrDataTypeDetails = mySqlDataType.split(' ');
    mySqlDataType = arrDataTypeDetails[0].toLowerCase();
    // "unsigned"/"zerofill" attributes widen the value range, so a larger PostgreSQL type may be needed.
    let increaseOriginalSize = arrDataTypeDetails.indexOf('unsigned') !== -1 || arrDataTypeDetails.indexOf('zerofill') !== -1;
    arrDataTypeDetails = null;

    if (mySqlDataType.indexOf('(') === -1) {
        // No parentheses detected.
        retVal = increaseOriginalSize ? objDataTypesMap[mySqlDataType].increased_size : objDataTypesMap[mySqlDataType].type;
    } else {
        // Parentheses detected.
        let arrDataType = mySqlDataType.split('(');
        let strDataType = arrDataType[0].toLowerCase();
        let strDataTypeDisplayWidth = arrDataType[1]; // includes the trailing ')', e.g. "255)" or "19,2)"
        arrDataType = null;

        if ('enum' === strDataType || 'set' === strDataType) {
            retVal = 'character varying(255)';
        } else if ('decimal' === strDataType || 'numeric' === strDataType) {
            // Keep the original precision/scale, e.g. decimal(19,2) -> numeric(19,2).
            retVal = objDataTypesMap[strDataType].type + '(' + strDataTypeDisplayWidth;
        } else if (objDataTypesMap[strDataType].mySqlVarLenPgSqlFixedLen) {
            // Should be converted without a length definition.
            // NOTE: the former "'decimal(19,2)' === mySqlDataType" check was removed —
            // it was unreachable, because the "decimal"/"numeric" branch above always matched first.
            retVal = increaseOriginalSize
                ? objDataTypesMap[strDataType].increased_size
                : objDataTypesMap[strDataType].type;
        } else {
            // Should be converted with a length definition.
            retVal = increaseOriginalSize
                ? objDataTypesMap[strDataType].increased_size + '(' + strDataTypeDisplayWidth
                : objDataTypesMap[strDataType].type + '(' + strDataTypeDisplayWidth;
        }
    }

    // Prevent incompatible length (CHARACTER(0) or CHARACTER VARYING(0)).
    if (retVal === 'character(0)') {
        retVal = 'character(1)';
    } else if (retVal === 'character varying(0)') {
        retVal = 'character varying(1)';
    }

    return retVal;
}
|
||||
|
||||
/**
 * Migrates structure of a single table to PostgreSql server.
 *
 * Flow: reads column metadata via "SHOW FULL COLUMNS" from MySQL, caches it in
 * self._dicTables[tableName].arrTableColumns, then (unless only data is being
 * migrated) builds and executes a "CREATE TABLE IF NOT EXISTS" statement on PostgreSQL.
 *
 * @param {Conversion} self
 * @param {String} tableName
 * @returns {Promise} resolves when the table is created (or when only data is migrated);
 *                    rejects on any MySQL/PostgreSQL connection or query error
 */
module.exports = function(self, tableName) {
    return connect(self).then(() => {
        return new Promise((resolveCreateTable, rejectCreateTable) => {
            log(self, '\t--[createTable] Currently creating table: `' + tableName + '`', self._dicTables[tableName].tableLogPath);
            self._mysql.getConnection((error, connection) => {
                if (error) {
                    // The connection is undefined.
                    generateError(self, '\t--[createTable] Cannot connect to MySQL server...\n' + error);
                    rejectCreateTable();
                } else {
                    let sql = 'SHOW FULL COLUMNS FROM `' + tableName + '`;';
                    connection.query(sql, (err, rows) => {
                        // Release the pooled MySQL connection before any further (PostgreSQL) work.
                        connection.release();

                        if (err) {
                            generateError(self, '\t--[createTable] ' + err, sql);
                            rejectCreateTable();
                        } else {
                            // Cache column metadata; later stages (data loading, constraints) read it from here.
                            self._dicTables[tableName].arrTableColumns = rows;

                            // In "migrate only data" mode the target table already exists — stop here.
                            if (self._migrateOnlyData) {
                                return resolveCreateTable();
                            }

                            self._pg.connect((error, client, done) => {
                                if (error) {
                                    generateError(self, '\t--[createTable] Cannot connect to PostgreSQL server...\n' + error, sql);
                                    rejectCreateTable();
                                } else {
                                    // Build the DDL: one '"column" type,' fragment per MySQL column.
                                    sql = 'CREATE TABLE IF NOT EXISTS "' + self._schema + '"."' + tableName + '"(';

                                    for (let i = 0; i < rows.length; ++i) {
                                        let strConvertedType = mapDataTypes(self._dataTypesMap, rows[i].Type);
                                        sql += '"' + rows[i].Field + '" ' + strConvertedType + ',';
                                    }

                                    rows = null;
                                    // Drop the trailing comma and close the statement.
                                    sql = sql.slice(0, -1) + ');';
                                    client.query(sql, err => {
                                        // Return the PostgreSQL client to the pool regardless of the outcome.
                                        done();

                                        if (err) {
                                            generateError(self, '\t--[createTable] ' + err, sql);
                                            rejectCreateTable();
                                        } else {
                                            log(self,
                                                '\t--[createTable] Table "' + self._schema + '"."' + tableName + '" is created...',
                                                self._dicTables[tableName].tableLogPath
                                            );
                                            resolveCreateTable();
                                        }
                                    });
                                }
                            });
                        }
                    });
                }
            });
        });
    });
};
|
74
migration/fmtp/VacuumProcessor.js
Normal file
74
migration/fmtp/VacuumProcessor.js
Normal file
|
@ -0,0 +1,74 @@
|
|||
/*
|
||||
* This file is a part of "NMIG" - the database migration tool.
|
||||
*
|
||||
* Copyright 2016 Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program (please see the "LICENSE.md" file).
|
||||
* If not, see <http://www.gnu.org/licenses/gpl.txt>.
|
||||
*
|
||||
* @author Anatoly Khaytovich <anatolyuss@gmail.com>
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const connect = require('./Connector');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
|
||||
/**
|
||||
* Runs "vacuum full" and "analyze".
|
||||
*
|
||||
* @param {Conversion} self
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function(self) {
|
||||
return connect(self).then(() => {
|
||||
return new Promise(resolve => {
|
||||
let vacuumPromises = [];
|
||||
|
||||
for (let i = 0; i < self._tablesToMigrate.length; ++i) {
|
||||
if (self._noVacuum.indexOf(self._tablesToMigrate[i]) === -1) {
|
||||
let msg = '\t--[runVacuumFullAndAnalyze] Running "VACUUM FULL and ANALYZE" query for table "'
|
||||
+ self._schema + '"."' + self._tablesToMigrate[i] + '"...';
|
||||
|
||||
log(self, msg);
|
||||
vacuumPromises.push(
|
||||
new Promise(resolveVacuum => {
|
||||
self._pg.connect((error, client, done) => {
|
||||
if (error) {
|
||||
generateError(self, '\t--[runVacuumFullAndAnalyze] Cannot connect to PostgreSQL server...');
|
||||
resolveVacuum();
|
||||
} else {
|
||||
let sql = 'VACUUM (FULL, ANALYZE) "' + self._schema + '"."' + self._tablesToMigrate[i] + '";';
|
||||
client.query(sql, err => {
|
||||
done();
|
||||
|
||||
if (err) {
|
||||
generateError(self, '\t--[runVacuumFullAndAnalyze] ' + err, sql);
|
||||
resolveVacuum();
|
||||
} else {
|
||||
let msg2 = '\t--[runVacuumFullAndAnalyze] Table "' + self._schema + '"."' + self._tablesToMigrate[i] + '" is VACUUMed...';
|
||||
log(self, msg2);
|
||||
resolveVacuum();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Promise.all(vacuumPromises).then(() => resolve());
|
||||
});
|
||||
});
|
||||
};
|
|
@ -20,6 +20,11 @@
|
|||
*/
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const log = require('./Logger');
|
||||
const generateError = require('./ErrorGenerator');
|
||||
const migrationStateManager = require('./MigrationStateManager');
|
||||
|
||||
/**
|
||||
* Attempts to convert MySQL view to PostgreSQL view.
|
||||
*
|
||||
|
@ -28,12 +33,12 @@
|
|||
* @param {String} mysqlViewCode
|
||||
* @returns {String}
|
||||
*/
|
||||
module.exports = function(schema, viewName, mysqlViewCode) {
|
||||
function generateView(schema, viewName, mysqlViewCode) {
|
||||
mysqlViewCode = mysqlViewCode.split('`').join('"');
|
||||
let queryStart = mysqlViewCode.indexOf('AS');
|
||||
mysqlViewCode = mysqlViewCode.slice(queryStart);
|
||||
let arrMysqlViewCode = mysqlViewCode.split(' ');
|
||||
|
||||
|
||||
for (let i = 0; i < arrMysqlViewCode.length; ++i) {
|
||||
if (
|
||||
arrMysqlViewCode[i].toLowerCase() === 'from'
|
||||
|
@ -45,4 +50,117 @@ module.exports = function(schema, viewName, mysqlViewCode) {
|
|||
}
|
||||
|
||||
return 'CREATE OR REPLACE VIEW "' + schema + '"."' + viewName + '" ' + arrMysqlViewCode.join(' ') + ';';
|
||||
}
|
||||
|
||||
/**
 * Writes a log, containing a view code.
 *
 * Ensures the "not_created_views" directory exists, then dumps the failed
 * view's SQL into "<notCreatedViewsPath>/<viewName>.sql".
 *
 * @param {Conversion} self
 * @param {String} viewName
 * @param {String} sql - the generated CREATE VIEW statement that failed
 * @returns {undefined}
 */
function logNotCreatedView(self, viewName, sql) {
    // Writes the view's SQL to its log file.
    // Extracted: the original duplicated this whole open/write/close block in two branches.
    const writeViewLog = () => {
        fs.open(self._notCreatedViewsPath + '/' + viewName + '.sql', 'w', self._0777, (error, fd) => {
            if (error) {
                log(self, error);
            } else {
                // Buffer.from() replaces the deprecated "new Buffer(...)" constructor.
                let buffer = Buffer.from(sql, self._encoding);
                fs.write(fd, buffer, 0, buffer.length, null, () => {
                    buffer = null;
                    // fs.close requires a callback on modern Node; errors here are non-actionable.
                    fs.close(fd, () => {});
                });
            }
        });
    };

    fs.stat(self._notCreatedViewsPath, (directoryDoesNotExist, stat) => {
        if (directoryDoesNotExist) {
            fs.mkdir(self._notCreatedViewsPath, self._0777, e => {
                if (e) {
                    log(self, '\t--[logNotCreatedView] ' + e);
                } else {
                    log(self, '\t--[logNotCreatedView] "not_created_views" directory is created...');
                    // "not_created_views" directory is created. Can write the log...
                    writeViewLog();
                }
            });
        } else if (!stat.isDirectory()) {
            // The path exists but is not a directory — nowhere to write.
            log(self, '\t--[logNotCreatedView] Cannot write the log due to unexpected error');
        } else {
            // "not_created_views" directory already exists. Can write the log...
            writeViewLog();
        }
    });
}
|
||||
|
||||
/**
 * Attempts to convert MySQL view to PostgreSQL view.
 *
 * Skips the whole step if the migration-state table says views were already
 * loaded. Otherwise, for each view: fetches its definition from MySQL via
 * "SHOW CREATE VIEW", rewrites it for PostgreSQL with generateView(), and
 * executes the result. Failures are logged (and the failed SQL dumped via
 * logNotCreatedView) without aborting the migration.
 *
 * @param {Conversion} self
 * @returns {Promise} always resolves once every view has been attempted
 */
module.exports = function(self) {
    return migrationStateManager.get(self, 'views_loaded').then(isViewsLoaded => {
        return new Promise(resolve => {
            let createViewPromises = [];

            if (!isViewsLoaded) {
                for (let i = 0; i < self._viewsToMigrate.length; ++i) {
                    createViewPromises.push(
                        new Promise(resolveProcessView2 => {
                            self._mysql.getConnection((error, connection) => {
                                if (error) {
                                    // The connection is undefined.
                                    generateError(self, '\t--[processView] Cannot connect to MySQL server...\n' + error);
                                    resolveProcessView2();
                                } else {
                                    let sql = 'SHOW CREATE VIEW `' + self._viewsToMigrate[i] + '`;';
                                    connection.query(sql, (strErr, rows) => {
                                        // Release the pooled MySQL connection before the PostgreSQL work starts.
                                        connection.release();

                                        if (strErr) {
                                            generateError(self, '\t--[processView] ' + strErr, sql);
                                            resolveProcessView2();
                                        } else {
                                            self._pg.connect((error, client, done) => {
                                                if (error) {
                                                    generateError(self, '\t--[processView] Cannot connect to PostgreSQL server...');
                                                    resolveProcessView2();
                                                } else {
                                                    // Rewrite the MySQL view definition into PostgreSQL syntax.
                                                    sql = generateView(self._schema, self._viewsToMigrate[i], rows[0]['Create View']);
                                                    rows = null;
                                                    client.query(sql, err => {
                                                        // Return the PostgreSQL client to the pool regardless of the outcome.
                                                        done();

                                                        if (err) {
                                                            generateError(self, '\t--[processView] ' + err, sql);
                                                            // Dump the failed SQL so it can be fixed and applied manually.
                                                            logNotCreatedView(self, self._viewsToMigrate[i], sql);
                                                            resolveProcessView2();
                                                        } else {
                                                            log(self, '\t--[processView] View "' + self._schema + '"."' + self._viewsToMigrate[i] + '" is created...');
                                                            resolveProcessView2();
                                                        }
                                                    });
                                                }
                                            });
                                        }
                                    });
                                }
                            });
                        })
                    );
                }
            }

            Promise.all(createViewPromises).then(() => resolve());
        });
    });
};
|
||||
|
|
4
nmig.js
4
nmig.js
|
@ -21,7 +21,7 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const fmtp = require('./migration/fmtp/FromMySQL2PostgreSQL');
|
||||
const main = require('./migration/fmtp/Main');
|
||||
|
||||
fs.readFile(__dirname + '/config.json', (error, data) => {
|
||||
if (error) {
|
||||
|
@ -31,7 +31,7 @@ fs.readFile(__dirname + '/config.json', (error, data) => {
|
|||
let config = JSON.parse(data.toString());
|
||||
config.tempDirPath = __dirname + '/temporary_directory';
|
||||
config.logsDirPath = __dirname + '/logs_directory';
|
||||
fmtp(config);
|
||||
main(config);
|
||||
} catch (err) {
|
||||
console.log('\n\t--Cannot parse JSON from' + __dirname + '/config.json');
|
||||
}
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
{
|
||||
"name": "nmig",
|
||||
"version": "1.1.1",
|
||||
"version": "2.2.0",
|
||||
"description": "The database migration app",
|
||||
"author": "Anatoly Khaytovich<anatolyuss@gmail.com>",
|
||||
"dependencies": {
|
||||
"mysql": "*",
|
||||
"pg": "*"
|
||||
"pg": "*",
|
||||
"pg-copy-streams": "*"
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue