Mirror of https://github.com/percona/percona-toolkit.git (synced 2025-12-11 02:04:38 +08:00)

Compare commits: snyk-fix-2 ... NEEDED-FOR (46 commits)
| SHA1 |
|---|
| 14ca3d0205 |
| 84095fd7d7 |
| 47b8c5b067 |
| 7717cfe4f1 |
| f9d9a993e9 |
| 7f322f7cbd |
| 3ff98c20bc |
| f816053065 |
| 34a14ec77e |
| ac53883f29 |
| 808c590e7a |
| c09b622c3e |
| 2df1bd8950 |
| ebacadf098 |
| c49c58db2b |
| 9711db87a7 |
| e964e17f21 |
| c3a201d5f8 |
| 2474b1f45b |
| 88367c1dea |
| 840ba6926b |
| 25f4ee6d80 |
| c83d2f547d |
| e4cecc3e69 |
| f9ea94f195 |
| c92d95bc38 |
| 6b449ec081 |
| af7bd8abd6 |
| 6fad1f0ff0 |
| f4a324581a |
| 3cb46e61f7 |
| 2198763042 |
| 4bf48d864f |
| 16f5aac023 |
| 69cbfca27f |
| 14623c5dce |
| 71ffb19e9e |
| 5c16d37020 |
| f70f8084dd |
| 5474f5d5ff |
| d0f8fb231b |
| 8b61618d35 |
| 9e9f7434d1 |
| 888af5f5ef |
| dc77289d60 |
| d5ec5d9ca8 |
.github/workflows/toolkit.yml (vendored, 4 changes)
@@ -27,7 +27,7 @@ jobs:
- name: Build the Docker image
run: echo "FROM oraclelinux:9-slim" > Dockerfile; echo "RUN microdnf -y update" >> Dockerfile; echo "COPY bin/* /usr/bin/" >> Dockerfile; docker build . --file Dockerfile --tag percona-toolkit:${{ github.sha }}
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@0.29.0
uses: aquasecurity/trivy-action@0.30.0
with:
image-ref: 'percona-toolkit:${{ github.sha }}'
format: 'table'
@@ -36,7 +36,7 @@ jobs:
vuln-type: 'os,library'
severity: 'CRITICAL,HIGH'
- name: Upload a Build Artifact
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.2
with:
name: binaries
path: bin/*
@@ -1,5 +1,11 @@
Changelog for Percona Toolkit

v3.7.0-1 released 2025-05-14

This release addresses multiple security vulnerabilities reported in Percona Toolkit version 3.7.0, including issues related to the `libxml2` component (CVE-2024-56171, CVE-2025-24928), `openssl` (CVE-2024-12797), and `krb5` (CVE-2022-37967).

* Fixed bug PT-2442: percona-toolkit:latest Vulnerability [CVE-2024-56171 CVE-2024-12797 CVE-2022-37967 CVE-2025-24928]

v3.7.0 released 2024-12-23

* Feature PT-2340: Support MySQL 8.4
@@ -11,7 +11,7 @@ MAKE_GOTOOLS

WriteMakefile(
NAME => 'Percona::Toolkit',
VERSION => '3.7.0',
VERSION => '3.7.0-1',
EXE_FILES => [
map {
(my $name = $_) =~ s/^bin.//;
@@ -6969,7 +6969,7 @@ sub main {
warn "Invalid output format:". $o->get('format');
warn "Using default 'dump' format";
} elsif ($o->get('output-format') || '' eq 'csv') {
$fields_separated_by = ", ";
$fields_separated_by = ",";
$optionally_enclosed_by = '"';
}
my $need_hdr = $o->get('header') && !-f $archive_file;

@@ -7511,7 +7511,7 @@ sub escape {
s/([\t\n\\])/\\$1/g if defined $_; # Escape tabs etc
my $s = defined $_ ? $_ : '\N'; # NULL = \N
# var & ~var will return 0 only for numbers
if ($s !~ /^[0-9,.E]+$/ && $optionally_enclosed_by eq '"') {
if ($s !~ /^[0-9,.E]+$/ && $optionally_enclosed_by eq '"' && $s ne '\N') {
$s =~ s/([^\\])"/$1\\"/g;
$s = $optionally_enclosed_by."$s".$optionally_enclosed_by;
}
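The escape() hunk above stops pt-archiver's CSV writer from quoting the NULL marker \N, so LOAD DATA reads the value back as NULL rather than as the literal string "\N". The snippet below is an illustrative sketch only, not code from the patch; escape_csv() is a hypothetical helper that mirrors the quoting rule after the fix:

```perl
#!/usr/bin/env perl
# Sketch of the CSV quoting rule after the fix: numbers and the NULL
# marker \N stay bare, everything else is double-quoted with embedded
# quotes escaped. Hypothetical helper, not part of pt-archiver.
use strict;
use warnings;

sub escape_csv {
    my ($val) = @_;
    my $s = defined $val ? $val : '\N';          # NULL = \N, as in the hunk
    if ( $s !~ /^[0-9,.E]+$/ && $s ne '\N' ) {   # the added "&& $s ne '\N'" test
        $s =~ s/([^\\])"/$1\\"/g;                # escape embedded double quotes
        $s = qq{"$s"};
    }
    return $s;
}

print join(',', map { escape_csv($_) } 1, undef, 'testing...'), "\n";
# prints: 1,\N,"testing..."  (the line the new t/pt-archiver/pt-2410.t expects)
```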
@@ -8943,12 +8943,20 @@ sub main {
$sth->finish();
PTDEBUG && _d('Last chunk:', Dumper($last_chunk));

if ( !$last_chunk || !$last_chunk->{new_table_name} ) {
if ( !$last_chunk ) {
$oktorun = undef;
_die("Option --resume refers non-existing job ID: ${old_job_id}. Exiting."
, UNSUPPORTED_OPERATION);
}

if ( !$last_chunk->{new_table_name}
|| !$last_chunk->{lower_boundary}
|| !$last_chunk->{upper_boundary} ) {
$oktorun = undef;
_die("Option --resume refers job ${old_job_id} with empty boundaries. Exiting."
, UNSUPPORTED_OPERATION);
}

if ( $last_chunk->{db} ne $db
|| $last_chunk->{tbl} ne $tbl
|| $last_chunk->{altr} ne $o->get('alter') ){

@@ -9607,11 +9615,16 @@ sub main {
# ''
# doesn't match '(?-xism:Failed to find a unique new table name)'

# (*) Frank: commented them out because it caused infinite loop
# and the mentioned test error doesn't arise

my $original_error = $EVAL_ERROR;
my $original_error_code = $?;
my $original_error_code;
if ( $? ) {
$original_error_code = $?;
}
else {
$original_error_code = $!;
}

$SIG{__DIE__} = 'DEFAULT';

foreach my $task ( reverse @cleanup_tasks ) {
eval {
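The second hunk replaces the unconditional copy of $? with a check that falls back to $!: $? only carries a child process's exit status, so when the failure came from a plain system call it stays 0 and the real error number is in $!. A small standalone illustration of that selection rule (assuming /bin/false exists; not code from the patch):

```perl
#!/usr/bin/env perl
# Illustrates why the cleanup handler prefers $? and falls back to $!.
use strict;
use warnings;

system('/bin/false');                      # the child's exit status lands in $?
printf "child status (\$?): %d\n", $?;     # non-zero here

open my $fh, '<', '/no/such/file'          # a failed syscall does not touch $? ...
    or printf "errno (\$!): %d (%s)\n", $! + 0, $!;   # ... the error lives in $!

# The rule the hunk adds: use the child status when it is set, otherwise errno.
my $original_error_code = $? ? $? : $! + 0;
printf "selected code: %d\n", $original_error_code;
```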
@@ -9797,9 +9810,9 @@ sub main {

if ( $o->get('history') ) {
my $sth = $cxn->dbh()->prepare(
"UPDATE ${hist_table} SET new_table_name = ?"
"UPDATE ${hist_table} SET new_table_name = ? WHERE job_id = ?"
);
$sth->execute($new_tbl->{tbl});
$sth->execute($new_tbl->{tbl}, $job_id);
}

# If the new table still exists, drop it unless the tool was interrupted.

@@ -9912,7 +9925,7 @@ sub main {
);
}

if ( my $alter = $o->get('alter') ) {
if ( (my $alter = $o->get('alter')) && !$o->get('resume') ) {
print "Altering new table...\n";
my $sql = "ALTER TABLE $new_tbl->{name} $alter";
print $sql, "\n" if $o->get('print');

@@ -9921,10 +9934,12 @@ sub main {
$cxn->dbh()->do($sql);
};
if ( $EVAL_ERROR ) {
if ( $plugin && $plugin->can('before_die') ) {
$plugin->before_die(exit_status => $EVAL_ERROR);
}
if ( $plugin && $plugin->can('before_die') ) {
$plugin->before_die(exit_status => $EVAL_ERROR);
}
# this is trapped by a signal handler. Don't replace it with _die
# we need to override $SIG{__DIE__} to return correct error code
$SIG{__DIE__} = sub { print(STDERR "$_[0]"); exit ERROR_ALTERING_TABLE; };
die "Error altering new table $new_tbl->{name}: $EVAL_ERROR\n";
}
print "Altered $new_tbl->{name} OK.\n";
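Taken together, the --resume hunks validate the percona.pt_osc_history row that the given job ID points at (the job must exist, must record a new table name and both copy boundaries, and must match the current database, table, and alter), and the history UPDATE is now scoped to the job's own row via WHERE job_id = ?. The following standalone DBI sketch of that pre-resume check is illustrative only, not part of the patch; it assumes the sandbox credentials used by the tests later in this diff and the column names visible in the hunks:

```perl
#!/usr/bin/env perl
# Inspect a pt-osc --history job before resuming it (illustrative sketch).
use strict;
use warnings;
use DBI;

my $job_id = $ARGV[0] or die "Usage: $0 JOB_ID\n";

# Sandbox DSN and credentials as used in the tests in this diff (assumption).
my $dbh = DBI->connect('DBI:mysql:host=127.0.0.1;port=12345',
    'msandbox', 'msandbox', { RaiseError => 1, PrintError => 0 });

my $row = $dbh->selectrow_hashref(
    'SELECT db, tbl, altr, new_table_name, lower_boundary, upper_boundary
       FROM percona.pt_osc_history WHERE job_id = ?',
    undef, $job_id,
);

die "No such job: $job_id\n" unless $row;              # pt-osc now dies here too
die "Job $job_id has empty boundaries\n"               # and here, per the new check
    unless $row->{new_table_name} && $row->{lower_boundary} && $row->{upper_boundary};

printf "Job %d looks resumable: %s.%s (%s), copied up to %s\n",
    $job_id, $row->{db}, $row->{tbl}, $row->{new_table_name}, $row->{upper_boundary};
```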
@@ -582,8 +582,8 @@ OS_NAME=
ARCH=
OS=
INSTALL=0
RPM_RELEASE=1
DEB_RELEASE=1
RPM_RELEASE=2
DEB_RELEASE=2
REVISION=0
GIT_BRANCH=${GIT_BRANCH}
GIT_REPO=https://github.com/percona/percona-toolkit.git
@@ -50,7 +50,7 @@ copyright = u'2024, Percona LLC and/or its affiliates'
# The short X.Y version.
version = '3.7'
# The full version, including alpha/beta/rc tags.
release = '3.7.0'
release = '3.7.0-1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -1,6 +1,16 @@
Release Notes
***************

v3.7.0-1 released 2025-05-14
==============================

This release addresses multiple security vulnerabilities reported in Percona Toolkit version 3.7.0, including issues related to the `libxml2` component (CVE-2024-56171, CVE-2025-24928), `openssl` (CVE-2024-12797), and `krb5` (CVE-2022-37967).

Bug Fixed
------------

* :jirabug:`PT-2442`: percona-toolkit:latest Vulnerability [CVE-2024-56171 CVE-2024-12797 CVE-2022-37967 CVE-2025-24928]

v3.7.0 released 2024-12-23
==============================
go.mod (24 changes)
@@ -2,12 +2,14 @@ module github.com/percona/percona-toolkit

go 1.23.4

toolchain go1.24.1

require (
github.com/AlekSi/pointer v1.2.0
github.com/Ladicle/tabwriter v1.0.0
github.com/Masterminds/semver v1.5.0
github.com/alecthomas/kingpin v2.2.6+incompatible
github.com/alecthomas/kong v1.8.1
github.com/alecthomas/kong v1.9.0
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
github.com/go-ini/ini v1.67.0
github.com/golang/mock v1.6.0

@@ -21,17 +23,17 @@ require (
github.com/pborman/getopt v1.1.0
github.com/percona/go-mysql v0.0.0-20210427141028-73d29c6da78c
github.com/pkg/errors v0.9.1
github.com/rs/zerolog v1.33.0
github.com/rs/zerolog v1.34.0
github.com/shirou/gopsutil v3.21.11+incompatible
github.com/sirupsen/logrus v1.9.3
github.com/stretchr/testify v1.10.0
github.com/xlab/treeprint v1.2.0
go.mongodb.org/mongo-driver v1.17.2
golang.org/x/crypto v0.35.0
go.mongodb.org/mongo-driver v1.17.3
golang.org/x/crypto v0.36.0
golang.org/x/exp v0.0.0-20230321023759-10a507213a29
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22
gopkg.in/yaml.v2 v2.4.0
k8s.io/api v0.32.2
k8s.io/api v0.32.3
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738
)

@@ -60,14 +62,14 @@ require (
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
github.com/yusufpapurcu/wmi v1.2.2 // indirect
golang.org/x/net v0.33.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/term v0.29.0 // indirect
golang.org/x/text v0.22.0 // indirect
golang.org/x/net v0.36.0 // indirect
golang.org/x/sync v0.12.0 // indirect
golang.org/x/sys v0.31.0 // indirect
golang.org/x/term v0.30.0 // indirect
golang.org/x/text v0.23.0 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
k8s.io/apimachinery v0.32.2 // indirect
k8s.io/apimachinery v0.32.3 // indirect
k8s.io/klog/v2 v2.130.1 // indirect
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.4.2 // indirect
go.sum (46 changes)
@@ -8,8 +8,8 @@ github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8v
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/kingpin v2.2.6+incompatible h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=
github.com/alecthomas/kingpin v2.2.6+incompatible/go.mod h1:59OFYbFVLKQKq+mqrL6Rw5bR0c3ACQaawgXx0QYndlE=
github.com/alecthomas/kong v1.8.1 h1:6aamvWBE/REnR/BCq10EcozmcpUPc5aGI1lPAWdB0EE=
github.com/alecthomas/kong v1.8.1/go.mod h1:p2vqieVMeTAnaC83txKtXe8FLke2X07aruPWXyMPQrU=
github.com/alecthomas/kong v1.9.0 h1:Wgg0ll5Ys7xDnpgYBuBn/wPeLGAuK0NvYmEcisJgrIs=
github.com/alecthomas/kong v1.9.0/go.mod h1:p2vqieVMeTAnaC83txKtXe8FLke2X07aruPWXyMPQrU=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=

@@ -88,9 +88,9 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=

@@ -125,14 +125,14 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg=
github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM=
go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ=
go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20230321023759-10a507213a29 h1:ooxPy7fPvB4kwsA2h+iBNHkAbp/4JxTSwCmvdjEYmug=
golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=

@@ -146,15 +146,15 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.36.0 h1:vWF2fRbw4qslQsQzgFqZff+BItCvGFQqKzKIzx1rmoA=
golang.org/x/net v0.36.0/go.mod h1:bFmbeoIPfrw4sMHNhb4J9f6+tPziuGjq7Jk/38fxi1I=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

@@ -170,18 +170,18 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=

@@ -205,10 +205,10 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
k8s.io/api v0.32.2 h1:bZrMLEkgizC24G9eViHGOPbW+aRo9duEISRIJKfdJuw=
k8s.io/api v0.32.2/go.mod h1:hKlhk4x1sJyYnHENsrdCWw31FEmCijNGPJO5WzHiJ6Y=
k8s.io/apimachinery v0.32.2 h1:yoQBR9ZGkA6Rgmhbp/yuT9/g+4lxtsGYwW6dR6BDPLQ=
k8s.io/apimachinery v0.32.2/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE=
k8s.io/api v0.32.3 h1:Hw7KqxRusq+6QSplE3NYG4MBxZw1BZnq4aP4cJVINls=
k8s.io/api v0.32.3/go.mod h1:2wEDTXADtm/HA7CCMD8D8bK4yuBUptzaRhYcYEEYA3k=
k8s.io/apimachinery v0.32.3 h1:JmDuDarhDmA/Li7j3aPrwhpNBA94Nvk5zLeOge9HH1U=
k8s.io/apimachinery v0.32.3/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE=
k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk=
k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE=
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738 h1:M3sRQVHv7vB20Xc2ybTt7ODCeFj6JSWYFzOFnYeS6Ro=
@@ -16,7 +16,6 @@ import (

const (
planSummaryCollScan = "COLLSCAN"
planSummaryIXScan = "IXSCAN"
)

type StatsError struct {

@@ -106,9 +105,8 @@ func (s *Stats) Add(doc proto.SystemProfile) error {
if qiac.PlanSummary == planSummaryCollScan {
qiac.CollScanCount++
}
if strings.HasPrefix(qiac.PlanSummary, planSummaryIXScan) {
qiac.PlanSummary = planSummaryIXScan
}

qiac.PlanSummary = strings.Split(qiac.PlanSummary, " ")[0]

qiac.NReturned = append(qiac.NReturned, float64(doc.Nreturned))
qiac.QueryTime = append(qiac.QueryTime, float64(doc.Millis))
@@ -208,7 +208,7 @@ func main() {
if err != nil {
log.Infof("cannot check version updates: %s", err.Error())
} else if advice != "" {
log.Infof(advice)
log.Infof("%s", advice)
}
}
@@ -373,24 +373,6 @@ func getHostInfo(ctx context.Context, client *mongo.Client) (*hostInfo, error) {
return nil, errors.Wrap(err, "GetHostInfo.hostInfo")
}

cmdOpts := proto.CommandLineOptions{}
query := primitive.D{{Key: "getCmdLineOpts", Value: 1}}
err := client.Database("admin").RunCommand(ctx, query).Decode(&cmdOpts)
if err != nil {
return nil, errors.Wrap(err, "cannot get command line options")
}

ss := proto.ServerStatus{}
query = primitive.D{{Key: "serverStatus", Value: 1}}
if err := client.Database("admin").RunCommand(ctx, query).Decode(&ss); err != nil {
return nil, errors.Wrap(err, "GetHostInfo.serverStatus")
}

pi := procInfo{}
if err := getProcInfo(int32(ss.Pid), &pi); err != nil {
pi.Error = err
}

nodeType, _ := getNodeType(ctx, client)
procCount, _ := countMongodProcesses()

@@ -398,24 +380,41 @@ func getHostInfo(ctx context.Context, client *mongo.Client) (*hostInfo, error) {
Hostname: hi.System.Hostname,
HostOsType: hi.Os.Type,
HostSystemCPUArch: hi.System.CpuArch,
DBPath: "", // Sets default. It will be overridden later if necessary

ProcessName: ss.Process,
ProcProcessCount: procCount,
Version: ss.Version,
NodeType: nodeType,

ProcPath: pi.Path,
ProcUserName: pi.UserName,
ProcCreateTime: pi.CreateTime,
CmdlineArgs: cmdOpts.Argv,
}
if ss.Repl != nil {
i.ReplicasetName = ss.Repl.SetName
ProcProcessCount: procCount,
NodeType: nodeType,
CmdlineArgs: nil,
}

if cmdOpts.Parsed.Storage.DbPath != "" {
i.DBPath = cmdOpts.Parsed.Storage.DbPath
var cmdOpts proto.CommandLineOptions
query := primitive.D{{Key: "getCmdLineOpts", Value: 1}}
err := client.Database("admin").RunCommand(ctx, query).Decode(&cmdOpts)
if err == nil {
if len(cmdOpts.Argv) > 0 {
i.CmdlineArgs = cmdOpts.Argv
}
if cmdOpts.Parsed.Storage.DbPath != "" {
i.DBPath = cmdOpts.Parsed.Storage.DbPath
}
}

var ss proto.ServerStatus
query = primitive.D{{Key: "serverStatus", Value: 1}}
err = client.Database("admin").RunCommand(ctx, query).Decode(&ss)
if err == nil {
i.ProcessName = ss.Process
i.Version = ss.Version
if ss.Repl != nil {
i.ReplicasetName = ss.Repl.SetName
}

pi := procInfo{}
if err := getProcInfo(int32(ss.Pid), &pi); err != nil {
pi.Error = err
} else {
i.ProcPath = pi.Path
i.ProcUserName = pi.UserName
i.ProcCreateTime = pi.CreateTime
}
}

return i, nil
@@ -8,9 +8,9 @@ import (
"time"

"github.com/pborman/getopt"
"github.com/stretchr/testify/require"

tu "github.com/percona/percona-toolkit/src/go/internal/testutils"
"github.com/percona/percona-toolkit/src/go/mongolib/proto"
)

func TestGetHostInfo(t *testing.T) {

@@ -49,6 +49,18 @@ func TestGetHostInfo(t *testing.T) {
}
}

func TestGetHostInfoResult(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()

client, err := tu.TestClient(ctx, tu.MongoDBShard1PrimaryPort)
require.NoError(t, err, "cannot get a new MongoDB client")

host, err := getHostInfo(ctx, client)
require.NoError(t, err, "getHostInfo error")
require.NotEmpty(t, host)
}

func TestClusterWideInfo(t *testing.T) {
testCases := []struct {
name string

@@ -85,16 +97,6 @@ func TestClusterWideInfo(t *testing.T) {
}
}

func addToCounters(ss proto.ServerStatus, increment int64) proto.ServerStatus {
ss.Opcounters.Command += increment
ss.Opcounters.Delete += increment
ss.Opcounters.GetMore += increment
ss.Opcounters.Insert += increment
ss.Opcounters.Query += increment
ss.Opcounters.Update += increment
return ss
}

func TestParseArgs(t *testing.T) {
tests := []struct {
args []string
@@ -15,7 +15,7 @@ Usage

::

pt-secure-data [<flags>] <command> [<args> ...]
pt-secure-collect [<flags>] <command> [<args> ...]

By default, :program:`pt-secure-collect` will collect the output of:
@@ -123,11 +123,11 @@ $output = output(
);
$output = `cat archive.test.table_2`;
is($output, <<EOF
1, 2, 3, 4
2, "\\N", 3, 4
3, 2, 3, "\\\t"
4, 2, 3, "\\\n"
5, 2, 3, "Zapp \\"Brannigan"
1,2,3,4
2,\\N,3,4
3,2,3,"\\\t"
4,2,3,"\\\n"
5,2,3,"Zapp \\"Brannigan"
EOF
, '--output-format=csv');
`rm -f archive.test.table_2`;
t/pt-archiver/pt-2410.t (new file, 75 lines)
@@ -0,0 +1,75 @@
#!/usr/bin/env perl

BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use charnames ':full';

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-archiver";

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('source');

if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox source';
}

my $output;
my $exit_status;
my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";

$sb->wipe_clean($dbh);
$sb->create_dbs($dbh, ['test']);

$sb->load_file('source', 't/pt-archiver/samples/pt-2410.sql');

($output, $exit_status) = full_output(
sub { pt_archiver::main(
qw(--where 1=1 --output-format=csv),
'--source', "L=1,D=pt_2410,t=test,F=$cnf",
'--file', '/tmp/pt-2410.csv') },
);

is(
$exit_status,
0,
'pt-archiver comleted'
);

$output = `cat /tmp/pt-2410.csv`;
like(
$output,
qr/1,\\N,"testing..."/,
'NULL values stored correctly'
) or diag($output);

$dbh->do("load data local infile '/tmp/pt-2410.csv' into table pt_2410.test COLUMNS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"'");

$output = `/tmp/12345/use pt_2410 -N -e 'SELECT * FROM test'`;

like(
$output,
qr/1 NULL testing.../,
'NULL values loaded correctly'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################
diag(`rm -f /tmp/pt-2410.csv`);
$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
exit;
t/pt-archiver/samples/pt-2410.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
CREATE DATABASE pt_2410;
USE pt_2410;

CREATE TABLE test(
id int not null primary key auto_increment,
column1 int default null,
column2 varchar(50) not null);

INSERT INTO test VALUES (null,null,'testing...');
INSERT INTO test VALUES (null,null,'testing...');
@@ -137,7 +137,7 @@ set_delay();
# We need to sleep, otherwise pt-osc can finish before replica is delayed
sleep($max_lag);

my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'engine=INNODB' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";
my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'ADD COLUMN foo varchar(32)' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";

$output = run_broken_job($args);

@@ -165,7 +165,7 @@ my @args = (qw(--execute --chunk-size=10 --history));

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--alter', 'engine=INNODB', '--execute', "--resume=${job_id}",
'--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
'--chunk-index=f2'
) }
);

@@ -186,7 +186,7 @@ like(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--max-lag', $max_lag,
'--resume', $job_id,
'--alter', 'engine=INNODB',
'--alter', 'ADD COLUMN foo varchar(32)',
'--plugin', "$plugin/pt-1717.pm",
),
},

@@ -208,8 +208,10 @@ ok(
'All rows copied correctly'
) or diag("New table checksum: '${new_table_checksum}', original content checksum: '${old_table_checksum}'");

diag(`/tmp/12345/use test -N -e "ALTER TABLE pt1717 DROP COLUMN foo"`);

# Tests for chunk-index and chunk-index-columns options
$args = "$source_dsn,D=test,t=pt1717 --alter engine=innodb --execute --history --chunk-size=10 --no-drop-new-table --no-drop-triggers --reverse-triggers --chunk-index=f2";
$args = "$source_dsn,D=test,t=pt1717 --alter 'ADD COLUMN foo varchar(32)' --execute --history --chunk-size=10 --no-drop-new-table --no-drop-triggers --reverse-triggers --chunk-index=f2";

set_delay();
$output = run_broken_job($args);

@@ -220,7 +222,7 @@ $job_id = $1;

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
'--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
) }
);

@@ -238,7 +240,7 @@ like(

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
'--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
'--chunk-index=f1'
) }
);

@@ -257,7 +259,7 @@ like(

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
'--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
'--chunk-index=f2', '--chunk-index-columns=1'
) }
);

@@ -288,7 +290,7 @@ is(
$output + 0,
3,
'Triggers were not dropped'
);
) or diag($output);

$output = `/tmp/12345/use -N -e "select count(*) from information_schema.triggers where TRIGGER_SCHEMA='test' AND EVENT_OBJECT_TABLE like '%pt1717%_new' AND trigger_name LIKE 'rt_%'"`;

@@ -300,7 +302,7 @@ is(

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
'--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
'--chunk-size=4',
'--chunk-index=f2'
) }

@@ -348,7 +350,7 @@ ok(

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
'--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
'--chunk-size=4',
'--chunk-index=f2'
) }

@@ -372,7 +374,7 @@ $output =~ /New table `test`.`([_]+pt1717_new)` not found, restart operation fro

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
'--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
'--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
'--chunk-size=4',
'--chunk-index=f2'
) }
t/pt-online-schema-change/pt-2355.t (new file, 239 lines)
@@ -0,0 +1,239 @@
#!/usr/bin/env perl

BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use threads;

use English qw(-no_match_vars);
use Test::More;

use Data::Dumper;
use PerconaTest;
use Sandbox;
use SqlModes;
use File::Temp qw/ tempdir tempfile /;

our $delay = 10;
my $max_lag = $delay / 2;
my $output;
my $exit;

my $tmp_file = File::Temp->new();
my $tmp_file_name = $tmp_file->filename;
unlink $tmp_file_name;

require "$trunk/bin/pt-online-schema-change";

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
if ($sb->is_cluster_mode) {
plan skip_all => 'Not for PXC';
}

my $source_dbh = $sb->get_dbh_for('source');
my $replica_dbh1 = $sb->get_dbh_for('replica1');
my $replica_dbh2 = $sb->get_dbh_for('replica2');
my $source_dsn = 'h=127.0.0.1,P=12345,u=msandbox,p=msandbox';
my $replica_dsn1 = 'h=127.0.0.1,P=12346,u=msandbox,p=msandbox';
my $replica_dsn2 = 'h=127.0.0.1,P=12347,u=msandbox,p=msandbox';
my $sample = "t/pt-online-schema-change/samples";
my $plugin = "$trunk/$sample/plugins";

# We need sync_relay_log=1 to keep changes after replica restart
my $cnf = '/tmp/12347/my.sandbox.cnf';
diag(`cp $cnf $cnf.bak`);
diag(`echo "[mysqld]" > /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "sync_relay_log=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "sync_relay_log_info=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "relay_log_recovery=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "!include /tmp/12347/my.sandbox.2.cnf" >> $cnf`);
diag(`/tmp/12347/stop >/dev/null`);
sleep 1;
diag(`/tmp/12347/start >/dev/null`);

sub reset_query_cache {
my @dbhs = @_;
return if ($sandbox_version ge '8.0');
foreach my $dbh (@dbhs) {
$dbh->do('RESET QUERY CACHE');
}
}

sub run_broken_job {
my ($args) = @_;
my ($fh, $filename) = tempfile();
my $pid = fork();

if (!$pid) {
open(STDERR, '>', $filename);
open(STDOUT, '>', $filename);
exec("$trunk/bin/pt-online-schema-change $args");
}

sleep($max_lag + $max_lag/2);
# stop replica 12347
diag(`/tmp/12347/stop >/dev/null`);
sleep 1;

waitpid($pid, 0);
my $output = do {
local $/ = undef;
<$fh>;
};

return $output;
}

sub set_delay {
$sb->wait_for_replicas();

diag("Setting replica delay to $delay seconds");
diag(`/tmp/12345/use -N test -e "DROP TABLE IF EXISTS pt1717_back"`);

$replica_dbh1->do("STOP ${replica_name}");
$replica_dbh1->do("CHANGE ${source_change} TO ${source_name}_DELAY=$delay");
$replica_dbh1->do("START ${replica_name}");

# Run a full table scan query to ensure the replica is behind the source
# There is no query cache in MySQL 8.0+
reset_query_cache($source_dbh, $source_dbh);
# Update one row so replica is delayed
$source_dbh->do('UPDATE `test`.`pt1717` SET f2 = f2 + 1 LIMIT 1');
$source_dbh->do('UPDATE `test`.`pt1717` SET f2 = f2 + 1 WHERE f1 = ""');

# Creating copy of table pt1717, so we can compare data later
diag(`/tmp/12345/use -N test -e "CREATE TABLE pt1717_back like pt1717"`);
diag(`/tmp/12345/use -N test -e "INSERT INTO pt1717_back SELECT * FROM pt1717"`);
}

# 1) Set the replica delay to 0 just in case we are re-running the tests without restarting the sandbox.
# 2) Load sample data
# 3) Set the replica delay to 30 seconds to be able to see the 'waiting' message.
diag("Setting replica delay to 0 seconds");
$replica_dbh1->do("STOP ${replica_name}");
$source_dbh->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${replica_name}");
$replica_dbh1->do("START ${replica_name}");

diag('Loading test data');
$sb->load_file('source', "t/pt-online-schema-change/samples/pt-1717.sql");

# Should be greater than chunk-size and big enough, so pt-osc will wait for delay
my $num_rows = 5000;
my $chunk_size = 10;
diag("Loading $num_rows into the table. This might take some time.");
diag(`util/mysql_random_data_load --host=127.0.0.1 --port=12345 --user=msandbox --password=msandbox test pt1717 $num_rows`);

diag("Starting tests...");

set_delay();

# We need to sleep, otherwise pt-osc can finish before replica is delayed
sleep($max_lag);

my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'ADD INDEX idx1(f1)' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";

$output = run_broken_job($args);

like(
$output,
qr/`test`.`pt1717` was not altered/s,
"pt-osc stopped with error as expected",
) or diag($output);

diag(`/tmp/12347/start >/dev/null`);
$sb->wait_for_replicas();

$output = `/tmp/12345/use -N -e "select job_id, upper_boundary from percona.pt_osc_history"`;
my ($job_id, $upper_boundary) = split(/\s+/, $output);

my $copied_rows = `/tmp/12345/use -N -e "select count(*) from test._pt1717_new"`;
chomp($copied_rows);

ok(
$copied_rows eq $upper_boundary,
'Upper chunk boundary stored correctly'
) or diag("Copied_rows: ${copied_rows}, upper boundary: ${upper_boundary}");;

($output, $exit) = full_output(
sub { pt_online_schema_change::main("$source_dsn,D=test,t=pt1717",
"--execute", "--chunk-size=${chunk_size}", "--max-lag=${max_lag}",
"--alter=ADD INDEX idx1(f1)",
"--resume=${job_id}",
) }
);

is(
$exit,
0,
'pt-osc works correctly with --resume'
) or diag($exit);

like(
$output,
qr/Successfully altered/,
'Success message printed'
) or diag($output);

# Corrupting job record, so we can test error message
diag(`/tmp/12345/use -N -e "update percona.pt_osc_history set new_table_name=NULL where job_id=${job_id}"`);

($output, $exit) = full_output(
sub { pt_online_schema_change::main("$source_dsn,D=test,t=pt1717",
"--execute", "--chunk-size=${chunk_size}", "--max-lag=${max_lag}",
"--alter=ADD INDEX idx1(f1)",
"--resume=${job_id}",
) }
);

is(
$exit,
17,
'pt-osc works correctly fails with empty boundaries'
) or diag($exit);

like(
$output,
qr/Option --resume refers job \d+ with empty boundaries. Exiting./,
'Correct error message printed'
) or diag($output);

unlike(
$output,
qr/Option --resume refers non-existing job ID: \d+. Exiting./,
'Misleading error message not printed'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################
diag("Cleaning");
$replica_dbh2 = $sb->get_dbh_for('replica2');
diag("Setting replica delay to 0 seconds");
$replica_dbh1->do("STOP ${replica_name}");
$replica_dbh2->do("STOP ${replica_name}");
$source_dbh->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${replica_name}");
$replica_dbh2->do("RESET ${replica_name}");
$replica_dbh1->do("START ${replica_name}");
$replica_dbh2->do("START ${replica_name}");

diag(`mv $cnf.bak $cnf`);

diag(`/tmp/12347/stop >/dev/null`);
diag(`/tmp/12347/start >/dev/null`);

diag("Dropping test database");
$source_dbh->do("DROP DATABASE IF EXISTS test");
$sb->wait_for_replicas();

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
t/pt-online-schema-change/pt-2407.t (new file, 71 lines)
@@ -0,0 +1,71 @@
#!/usr/bin/env perl

BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-online-schema-change";
require VersionParser;

use Data::Dumper;

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $source_dbh = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');

if ( !$source_dbh ) {
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$replica_dbh ) {
plan skip_all => 'Cannot connect to sandbox replica';
}

my @args = qw(--set-vars innodb_lock_wait_timeout=3);
my $output = "";
my $dsn = "h=127.1,P=12345,u=msandbox,p=msandbox";
my $exit = 0;
my $sample = "t/pt-online-schema-change/samples";

$sb->load_file('source', "$sample/pt-2407.sql");

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$dsn,D=pt_2407,t=t1",
'--alter', 'alter table t1 ADD COLUMN payout_group_id VARCHAR(255) DEFAULT NULL, ALGORITHM=INSTANT;', '--execute') }
);

is(
$exit,
11,
'Return code non-zero for failed operation'
) or diag($exit);

like(
$output,
qr/You have an error in your SQL syntax/,
'Job failed due to SQL syntax error'
) or diag($output);

like(
$output,
qr/Error altering new table/,
'Error altering new table message printed'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
#
done_testing;
t/pt-online-schema-change/pt-2422.t (new file, 120 lines)
@@ -0,0 +1,120 @@
#!/usr/bin/env perl

BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-online-schema-change";
require VersionParser;

use Data::Dumper;

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $source_dbh = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');

if ( !$source_dbh ) {
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$replica_dbh ) {
plan skip_all => 'Cannot connect to sandbox replica';
}

my @args = qw(--set-vars innodb_lock_wait_timeout=3);
my $output = "";
my $dsn = "h=127.1,P=12345,u=msandbox,p=msandbox";
my $exit = 0;
my $sample = "t/pt-online-schema-change/samples";

$sb->load_file('source', "$sample/basic_no_fks_innodb.sql");
$source_dbh->do('CREATE TABLE pt_osc.pt_2422 LIKE pt_osc.t');
$source_dbh->do('INSERT INTO pt_osc.pt_2422 SELECT * FROM pt_osc.t');

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$dsn,D=pt_osc,t=t",
'--alter', 'engine=innodb', '--execute', '--history') }
);

is(
$exit,
0,
'basic test with option --history finished OK'
) or diag($output);

like(
$output,
qr/Job \d started/,
'Job id printed in the beginning of the tool output'
);

like(
$output,
qr/Job \d finished successfully/,
'Job id printed for successful copy'
);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=1"`;

like(
$output,
qr/_t_new/,
'Correct new table name inserted'
) or diag($output);

($output, $exit) = full_output(
sub { pt_online_schema_change::main(@args, "$dsn,D=pt_osc,t=pt_2422",
'--alter', 'engine=innodb', '--execute', '--history') }
);

is(
$exit,
0,
'basic test with second table and option --history finished OK'
) or diag($output);

like(
$output,
qr/Job \d started/,
'Job id printed in the beginning of the tool output for the second table'
);

like(
$output,
qr/Job \d finished successfully/,
'Job id printed for successful copy of the second table'
);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=1"`;

like(
$output,
qr/_t_new/,
'New table name for previouse job was not updated'
) or diag($output);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=2"`;

like(
$output,
qr/_pt_2422_new/,
'Correct new table name inserted for the second table'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
#
done_testing;
t/pt-online-schema-change/samples/pt-2407.sql (new file, 12 lines)
@@ -0,0 +1,12 @@
CREATE DATABASE pt_2407;

USE pt_2407;

CREATE TABLE t1 (
c1 int NOT NULL,
c2 varchar(100) NOT NULL,
PRIMARY KEY (c1),
KEY idx (c2)
) ENGINE=InnoDB;

INSERT INTO t1 VALUES(1,1),(2,2),(3,3),(4,4),(5,5);