Mirror of https://github.com/percona/percona-toolkit.git, synced 2025-09-04 03:26:19 +00:00.

Compare commits: snyk-fix-2 ... PMM-12548- (39 commits)
Commit SHA1s:

824ba7e80a, c9bb7a260d, ac53883f29, 808c590e7a, c09b622c3e, 2df1bd8950,
ebacadf098, c49c58db2b, 9711db87a7, e964e17f21, c3a201d5f8, 2474b1f45b,
88367c1dea, 840ba6926b, 25f4ee6d80, c83d2f547d, e4cecc3e69, f9ea94f195,
c92d95bc38, 6b449ec081, af7bd8abd6, 6fad1f0ff0, f4a324581a, 3cb46e61f7,
2198763042, 4bf48d864f, 16f5aac023, 69cbfca27f, 14623c5dce, 71ffb19e9e,
5c16d37020, f70f8084dd, 5474f5d5ff, d0f8fb231b, 8b61618d35, 9e9f7434d1,
888af5f5ef, dc77289d60, d5ec5d9ca8
.github/workflows/toolkit.yml (vendored, 4 lines changed)
@@ -27,7 +27,7 @@ jobs:
       - name: Build the Docker image
         run: echo "FROM oraclelinux:9-slim" > Dockerfile; echo "RUN microdnf -y update" >> Dockerfile; echo "COPY bin/* /usr/bin/" >> Dockerfile; docker build . --file Dockerfile --tag percona-toolkit:${{ github.sha }}
       - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@0.29.0
+        uses: aquasecurity/trivy-action@0.30.0
         with:
           image-ref: 'percona-toolkit:${{ github.sha }}'
           format: 'table'
@@ -36,7 +36,7 @@ jobs:
           vuln-type: 'os,library'
           severity: 'CRITICAL,HIGH'
       - name: Upload a Build Artifact
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: binaries
           path: bin/*

@@ -6969,7 +6969,7 @@ sub main {
       warn "Invalid output format:". $o->get('format');
       warn "Using default 'dump' format";
    } elsif ($o->get('output-format') || '' eq 'csv') {
-      $fields_separated_by = ", ";
+      $fields_separated_by = ",";
       $optionally_enclosed_by = '"';
    }
    my $need_hdr = $o->get('header') && !-f $archive_file;
@@ -7511,7 +7511,7 @@ sub escape {
    s/([\t\n\\])/\\$1/g if defined $_;  # Escape tabs etc
    my $s = defined $_ ? $_ : '\N';     # NULL = \N
    # var & ~var will return 0 only for numbers
-   if ($s !~ /^[0-9,.E]+$/ && $optionally_enclosed_by eq '"') {
+   if ($s !~ /^[0-9,.E]+$/ && $optionally_enclosed_by eq '"' && $s ne '\N') {
      $s =~ s/([^\\])"/$1\\"/g;
      $s = $optionally_enclosed_by."$s".$optionally_enclosed_by;
   }

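Aside (an editor's sketch, not repository code): the escape() change matters because MySQL's LOAD DATA INFILE recognizes only an unquoted \N as NULL; once quoted, "\N" round-trips as a literal two-character string. A minimal Perl sketch of the corrected rule, using illustrative values that mirror t/pt-archiver/pt-2410.t below:

#!/usr/bin/env perl
use strict;
use warnings;

# Numbers and the NULL marker \N stay bare; everything else is quoted,
# with embedded quotes backslash-escaped (same regexes as the hunk above).
sub escape_csv {
    my ($val) = @_;
    return '\N' unless defined $val;           # NULL = \N, never quoted
    ( my $s = $val ) =~ s/([\t\n\\])/\\$1/g;   # escape tabs, newlines, backslashes
    if ( $s !~ /^[0-9,.E]+$/ ) {               # numeric fields stay unquoted
        $s =~ s/([^\\])"/$1\\"/g;              # escape embedded quotes
        $s = qq{"$s"};
    }
    return $s;
}

print join(',', map { escape_csv($_) } (1, undef, 'testing...')), "\n";
# prints: 1,\N,"testing..."
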
@@ -8943,12 +8943,20 @@ sub main {
    $sth->finish();
    PTDEBUG && _d('Last chunk:', Dumper($last_chunk));

-   if ( !$last_chunk || !$last_chunk->{new_table_name} ) {
+   if ( !$last_chunk ) {
       $oktorun = undef;
       _die("Option --resume refers non-existing job ID: ${old_job_id}. Exiting."
          , UNSUPPORTED_OPERATION);
    }
+
+   if ( !$last_chunk->{new_table_name}
+        || !$last_chunk->{lower_boundary}
+        || !$last_chunk->{upper_boundary} ) {
+      $oktorun = undef;
+      _die("Option --resume refers job ${old_job_id} with empty boundaries. Exiting."
+         , UNSUPPORTED_OPERATION);
+   }

    if ( $last_chunk->{db} ne $db
         || $last_chunk->{tbl} ne $tbl
         || $last_chunk->{altr} ne $o->get('alter') ){
@@ -9607,11 +9615,16 @@ sub main {
    # ''
    # doesn't match '(?-xism:Failed to find a unique new table name)'

    # (*) Frank: commented them out because it caused infinite loop
    # and the mentioned test error doesn't arise

    my $original_error = $EVAL_ERROR;
-   my $original_error_code = $?;
+   my $original_error_code;
+   if ( $? ) {
+      $original_error_code = $?;
+   }
+   else {
+      $original_error_code = $!;
+   }

    $SIG{__DIE__} = 'DEFAULT';

    foreach my $task ( reverse @cleanup_tasks ) {
       eval {
@@ -9797,9 +9810,9 @@ sub main {

    if ( $o->get('history') ) {
       my $sth = $cxn->dbh()->prepare(
-         "UPDATE ${hist_table} SET new_table_name = ?"
+         "UPDATE ${hist_table} SET new_table_name = ? WHERE job_id = ?"
       );
-      $sth->execute($new_tbl->{tbl});
+      $sth->execute($new_tbl->{tbl}, $job_id);
    }

    # If the new table still exists, drop it unless the tool was interrupted.
@@ -9912,7 +9925,7 @@ sub main {
       );
    }

-   if ( my $alter = $o->get('alter') ) {
+   if ( (my $alter = $o->get('alter')) && !$o->get('resume') ) {
       print "Altering new table...\n";
       my $sql = "ALTER TABLE $new_tbl->{name} $alter";
       print $sql, "\n" if $o->get('print');
@@ -9921,10 +9934,12 @@ sub main {
          $cxn->dbh()->do($sql);
       };
       if ( $EVAL_ERROR ) {
          if ( $plugin && $plugin->can('before_die') ) {
             $plugin->before_die(exit_status => $EVAL_ERROR);
          }
+         # this is trapped by a signal handler. Don't replace it with _die
+         # we need to override $SIG{__DIE__} to return correct error code
+         $SIG{__DIE__} = sub { print(STDERR "$_[0]"); exit ERROR_ALTERING_TABLE; };
          die "Error altering new table $new_tbl->{name}: $EVAL_ERROR\n";
       }
       print "Altered $new_tbl->{name} OK.\n";

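Aside (an editor's sketch, not repository code): the split above separates two failure modes the old single check conflated, so --resume can say whether the job ID is missing from the history table entirely or merely has an unusable last chunk. Roughly, with hypothetical values:

use strict;
use warnings;

# $last_chunk stands in for the row read back from the history table
# for the job being resumed; field names mirror the hunk above.
sub check_resume_chunk {
    my ($old_job_id, $last_chunk) = @_;
    # Job ID not present in the history table at all.
    die "Option --resume refers non-existing job ID: ${old_job_id}. Exiting.\n"
        unless $last_chunk;
    # Row exists, but the recorded chunk cannot seed a restart.
    die "Option --resume refers job ${old_job_id} with empty boundaries. Exiting.\n"
        if !$last_chunk->{new_table_name}
        || !$last_chunk->{lower_boundary}
        || !$last_chunk->{upper_boundary};
    return 1;
}

check_resume_chunk(42, { new_table_name => '_pt1717_new',
                         lower_boundary => 1, upper_boundary => 4990 });

The $?/$! change in the cleanup hunk serves the same goal: it preserves a meaningful exit code when $? is zero instead of silently reporting success.
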
go.mod (24 lines changed)
@@ -2,12 +2,14 @@ module github.com/percona/percona-toolkit

 go 1.23.4

+toolchain go1.24.1
+
 require (
    github.com/AlekSi/pointer v1.2.0
    github.com/Ladicle/tabwriter v1.0.0
    github.com/Masterminds/semver v1.5.0
    github.com/alecthomas/kingpin v2.2.6+incompatible
-   github.com/alecthomas/kong v1.8.1
+   github.com/alecthomas/kong v1.9.0
    github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
    github.com/go-ini/ini v1.67.0
    github.com/golang/mock v1.6.0
@@ -21,17 +23,17 @@ require (
    github.com/pborman/getopt v1.1.0
    github.com/percona/go-mysql v0.0.0-20210427141028-73d29c6da78c
    github.com/pkg/errors v0.9.1
-   github.com/rs/zerolog v1.33.0
+   github.com/rs/zerolog v1.34.0
    github.com/shirou/gopsutil v3.21.11+incompatible
    github.com/sirupsen/logrus v1.9.3
    github.com/stretchr/testify v1.10.0
    github.com/xlab/treeprint v1.2.0
-   go.mongodb.org/mongo-driver v1.17.2
-   golang.org/x/crypto v0.35.0
+   go.mongodb.org/mongo-driver v1.17.3
+   golang.org/x/crypto v0.36.0
    golang.org/x/exp v0.0.0-20230321023759-10a507213a29
    gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22
    gopkg.in/yaml.v2 v2.4.0
-   k8s.io/api v0.32.2
+   k8s.io/api v0.32.3
    k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738
 )

@@ -60,14 +62,14 @@ require (
    github.com/xdg-go/stringprep v1.0.4 // indirect
    github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
    github.com/yusufpapurcu/wmi v1.2.2 // indirect
-   golang.org/x/net v0.33.0 // indirect
-   golang.org/x/sync v0.11.0 // indirect
-   golang.org/x/sys v0.30.0 // indirect
-   golang.org/x/term v0.29.0 // indirect
-   golang.org/x/text v0.22.0 // indirect
+   golang.org/x/net v0.36.0 // indirect
+   golang.org/x/sync v0.12.0 // indirect
+   golang.org/x/sys v0.31.0 // indirect
+   golang.org/x/term v0.30.0 // indirect
+   golang.org/x/text v0.23.0 // indirect
    gopkg.in/inf.v0 v0.9.1 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
-   k8s.io/apimachinery v0.32.2 // indirect
+   k8s.io/apimachinery v0.32.3 // indirect
    k8s.io/klog/v2 v2.130.1 // indirect
    sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 // indirect
    sigs.k8s.io/structured-merge-diff/v4 v4.4.2 // indirect

go.sum (46 lines changed)
@@ -8,8 +8,8 @@ github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8v
 github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
 github.com/alecthomas/kingpin v2.2.6+incompatible h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=
 github.com/alecthomas/kingpin v2.2.6+incompatible/go.mod h1:59OFYbFVLKQKq+mqrL6Rw5bR0c3ACQaawgXx0QYndlE=
-github.com/alecthomas/kong v1.8.1 h1:6aamvWBE/REnR/BCq10EcozmcpUPc5aGI1lPAWdB0EE=
-github.com/alecthomas/kong v1.8.1/go.mod h1:p2vqieVMeTAnaC83txKtXe8FLke2X07aruPWXyMPQrU=
+github.com/alecthomas/kong v1.9.0 h1:Wgg0ll5Ys7xDnpgYBuBn/wPeLGAuK0NvYmEcisJgrIs=
+github.com/alecthomas/kong v1.9.0/go.mod h1:p2vqieVMeTAnaC83txKtXe8FLke2X07aruPWXyMPQrU=
 github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
 github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=
@@ -88,9 +88,9 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
 github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
-github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
-github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
-github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
+github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
+github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
+github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
 github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
 github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
 github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
@@ -125,14 +125,14 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg=
 github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
-go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM=
-go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
+go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ=
+go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
-golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
+golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
+golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
 golang.org/x/exp v0.0.0-20230321023759-10a507213a29 h1:ooxPy7fPvB4kwsA2h+iBNHkAbp/4JxTSwCmvdjEYmug=
 golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
 golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
@@ -146,15 +146,15 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
-golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/net v0.36.0 h1:vWF2fRbw4qslQsQzgFqZff+BItCvGFQqKzKIzx1rmoA=
+golang.org/x/net v0.36.0/go.mod h1:bFmbeoIPfrw4sMHNhb4J9f6+tPziuGjq7Jk/38fxi1I=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
-golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
+golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -170,18 +170,18 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
-golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
+golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
-golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
+golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
+golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
-golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
-golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
+golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
+golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
@@ -205,10 +205,10 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-k8s.io/api v0.32.2 h1:bZrMLEkgizC24G9eViHGOPbW+aRo9duEISRIJKfdJuw=
-k8s.io/api v0.32.2/go.mod h1:hKlhk4x1sJyYnHENsrdCWw31FEmCijNGPJO5WzHiJ6Y=
-k8s.io/apimachinery v0.32.2 h1:yoQBR9ZGkA6Rgmhbp/yuT9/g+4lxtsGYwW6dR6BDPLQ=
-k8s.io/apimachinery v0.32.2/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE=
+k8s.io/api v0.32.3 h1:Hw7KqxRusq+6QSplE3NYG4MBxZw1BZnq4aP4cJVINls=
+k8s.io/api v0.32.3/go.mod h1:2wEDTXADtm/HA7CCMD8D8bK4yuBUptzaRhYcYEEYA3k=
+k8s.io/apimachinery v0.32.3 h1:JmDuDarhDmA/Li7j3aPrwhpNBA94Nvk5zLeOge9HH1U=
+k8s.io/apimachinery v0.32.3/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE=
 k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk=
 k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE=
 k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738 h1:M3sRQVHv7vB20Xc2ybTt7ODCeFj6JSWYFzOFnYeS6Ro=

@@ -16,8 +16,6 @@ sphinx-tabs
 certifi>=2024.7.4 # not directly required, pinned by Snyk to avoid a vulnerability
 jinja2>=3.1.6 # not directly required, pinned by Snyk to avoid a vulnerability
 pygments>=2.15.0 # not directly required, pinned by Snyk to avoid a vulnerability
-requests>=2.32.2 # not directly required, pinned by Snyk to avoid a vulnerability
+requests>=2.31.0 # not directly required, pinned by Snyk to avoid a vulnerability
 setuptools>=70.0.0 # not directly required, pinned by Snyk to avoid a vulnerability
 idna>=3.7 # not directly required, pinned by Snyk to avoid a vulnerability
 urllib3>=2.2.2 # not directly required, pinned by Snyk to avoid a vulnerability
 zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability

@@ -8,103 +8,104 @@ import (
 )

 // docsExamined is renamed from nscannedObjects in 3.2.0
+// json tags are used for PMM purposes
 // https://docs.mongodb.com/manual/reference/database-profiler/#system.profile.docsExamined
 type SystemProfile struct {
-	AllUsers        []interface{} `bson:"allUsers"`
-	Client          string        `bson:"client"`
-	CursorExhausted bool          `bson:"cursorExhausted"`
+	AllUsers        []interface{} `bson:"allUsers" json:"allUsers"`
+	Client          string        `bson:"client" json:"client"`
+	CursorExhausted bool          `bson:"cursorExhausted" json:"cursorExhausted"`
 	ExecStats       struct {
-		Advanced                    int `bson:"advanced"`
-		ExecutionTimeMillisEstimate int `bson:"executionTimeMillisEstimate"`
+		Advanced                    int `bson:"advanced" json:"advanced"`
+		ExecutionTimeMillisEstimate int `bson:"executionTimeMillisEstimate" json:"executionTimeMillisEstimate"`
 		InputStage                  struct {
-			Advanced                    int    `bson:"advanced"`
-			Direction                   string `bson:"direction"`
-			DocsExamined                int    `bson:"docsExamined"`
-			ExecutionTimeMillisEstimate int    `bson:"executionTimeMillisEstimate"`
+			Advanced                    int    `bson:"advanced" json:"advanced"`
+			Direction                   string `bson:"direction" json:"direction"`
+			DocsExamined                int    `bson:"docsExamined" json:"docsExamined"`
+			ExecutionTimeMillisEstimate int    `bson:"executionTimeMillisEstimate" json:"executionTimeMillisEstimate"`
 			Filter                      struct {
 				Date struct {
-					Eq string `bson:"$eq"`
-				} `bson:"date"`
-			} `bson:"filter"`
-			Invalidates  int    `bson:"invalidates"`
-			IsEOF        int    `bson:"isEOF"`
-			NReturned    int    `bson:"nReturned"`
-			NeedTime     int    `bson:"needTime"`
-			NeedYield    int    `bson:"needYield"`
-			RestoreState int    `bson:"restoreState"`
-			SaveState    int    `bson:"saveState"`
-			Stage        string `bson:"stage"`
-			Works        int    `bson:"works"`
-		} `bson:"inputStage"`
-		Invalidates  int    `bson:"invalidates"`
-		IsEOF        int    `bson:"isEOF"`
-		LimitAmount  int    `bson:"limitAmount"`
-		NReturned    int    `bson:"nReturned"`
-		NeedTime     int    `bson:"needTime"`
-		NeedYield    int    `bson:"needYield"`
-		RestoreState int    `bson:"restoreState"`
-		SaveState    int    `bson:"saveState"`
-		Stage        string `bson:"stage"`
-		Works        int    `bson:"works"`
-		DocsExamined int    `bson:"docsExamined"`
-	} `bson:"execStats"`
-	KeyUpdates   int `bson:"keyUpdates"`
-	KeysExamined int `bson:"keysExamined"`
+					Eq string `bson:"$eq" json:"$eq"`
+				} `bson:"date" json:"date"`
+			} `bson:"filter" json:"filter"`
+			Invalidates  int    `bson:"invalidates" json:"invalidates"`
+			IsEOF        int    `bson:"isEOF" json:"isEOF"`
+			NReturned    int    `bson:"nReturned" json:"nReturned"`
+			NeedTime     int    `bson:"needTime" json:"needTime"`
+			NeedYield    int    `bson:"needYield" json:"needYield"`
+			RestoreState int    `bson:"restoreState" json:"restoreState"`
+			SaveState    int    `bson:"saveState" json:"saveState"`
+			Stage        string `bson:"stage" json:"stage"`
+			Works        int    `bson:"works" json:"works"`
+		} `bson:"inputStage" json:"inputStage"`
+		Invalidates  int    `bson:"invalidates" json:"invalidates"`
+		IsEOF        int    `bson:"isEOF" json:"isEOF"`
+		LimitAmount  int    `bson:"limitAmount" json:"limitAmount"`
+		NReturned    int    `bson:"nReturned" json:"nReturned"`
+		NeedTime     int    `bson:"needTime" json:"needTime"`
+		NeedYield    int    `bson:"needYield" json:"needYield"`
+		RestoreState int    `bson:"restoreState" json:"restoreState"`
+		SaveState    int    `bson:"saveState" json:"saveState"`
+		Stage        string `bson:"stage" json:"stage"`
+		Works        int    `bson:"works" json:"works"`
+		DocsExamined int    `bson:"docsExamined" json:"docsExamined"`
+	} `bson:"execStats" json:"execStats"`
+	KeyUpdates   int `bson:"keyUpdates" json:"keyUpdates"`
+	KeysExamined int `bson:"keysExamined" json:"keysExamined"`
 	Locks        struct {
 		Collection struct {
 			AcquireCount struct {
-				Read       int `bson:"R"`
-				ReadShared int `bson:"r"`
-			} `bson:"acquireCount"`
-		} `bson:"Collection"`
+				Read       int `bson:"R" json:"R"`
+				ReadShared int `bson:"r" json:"r"`
+			} `bson:"acquireCount" json:"acquireCount"`
+		} `bson:"Collection" json:"Collection"`
 		Database struct {
 			AcquireCount struct {
-				ReadShared int `bson:"r"`
-			} `bson:"acquireCount"`
+				ReadShared int `bson:"r" json:"r"`
+			} `bson:"acquireCount" json:"acquireCount"`
 			AcquireWaitCount struct {
-				ReadShared int `bson:"r"`
-			} `bson:"acquireWaitCount"`
+				ReadShared int `bson:"r" json:"r"`
+			} `bson:"acquireWaitCount" json:"acquireWaitCount"`
 			TimeAcquiringMicros struct {
-				ReadShared int64 `bson:"r"`
-			} `bson:"timeAcquiringMicros"`
-		} `bson:"Database"`
+				ReadShared int64 `bson:"r" json:"r"`
+			} `bson:"timeAcquiringMicros" json:"timeAcquiringMicros"`
+		} `bson:"Database" json:"Database"`
 		Global struct {
 			AcquireCount struct {
-				ReadShared  int `bson:"r"`
-				WriteShared int `bson:"w"`
-			} `bson:"acquireCount"`
-		} `bson:"Global"`
+				ReadShared  int `bson:"r" json:"r"`
+				WriteShared int `bson:"w" json:"w"`
+			} `bson:"acquireCount" json:"acquireCount"`
+		} `bson:"Global" json:"Global"`
 		MMAPV1Journal struct {
 			AcquireCount struct {
-				ReadShared int `bson:"r"`
-			} `bson:"acquireCount"`
-		} `bson:"MMAPV1Journal"`
-	} `bson:"locks"`
-	Millis             int       `bson:"millis"`
-	Nreturned          int       `bson:"nreturned"`
-	Ns                 string    `bson:"ns"`
-	NumYield           int       `bson:"numYield"`
-	Op                 string    `bson:"op"`
-	PlanSummary        string    `bson:"planSummary"`
-	Protocol           string    `bson:"protocol"`
-	Query              bson.D    `bson:"query"`
-	UpdateObj          bson.D    `bson:"updateobj"`
-	Command            bson.D    `bson:"command"`
-	OriginatingCommand bson.D    `bson:"originatingCommand"`
-	ResponseLength     int       `bson:"responseLength"`
-	Ts                 time.Time `bson:"ts"`
-	User               string    `bson:"user"`
-	WriteConflicts     int       `bson:"writeConflicts"`
-	DocsExamined       int       `bson:"docsExamined"`
-	QueryHash          string    `bson:"queryHash"`
+				ReadShared int `bson:"r" json:"r"`
+			} `bson:"acquireCount" json:"acquireCount"`
+		} `bson:"MMAPV1Journal" json:"MMAPV1Journal"`
+	} `bson:"locks" json:"locks"`
+	Millis             int       `bson:"millis" json:"durationMillis"`
+	Nreturned          int       `bson:"nreturned" json:"nreturned"`
+	Ns                 string    `bson:"ns" json:"ns"`
+	NumYield           int       `bson:"numYield" json:"numYield"`
+	Op                 string    `bson:"op" json:"op"`
+	PlanSummary        string    `bson:"planSummary" json:"planSummary"`
+	Protocol           string    `bson:"protocol" json:"protocol"`
+	Query              bson.D    `bson:"query" json:"query"`
+	UpdateObj          bson.D    `bson:"updateobj" json:"updateobj"`
+	Command            bson.D    `bson:"command" json:"command"`
+	OriginatingCommand bson.D    `bson:"originatingCommand" json:"originatingCommand"`
+	ResponseLength     int       `bson:"responseLength" json:"reslen"`
+	Ts                 time.Time `bson:"ts" json:"ts"`
+	User               string    `bson:"user" json:"user"`
+	WriteConflicts     int       `bson:"writeConflicts" json:"writeConflicts"`
+	DocsExamined       int       `bson:"docsExamined" json:"docsExamined"`
+	QueryHash          string    `bson:"queryHash" json:"queryHash"`
 	Storage            struct {
 		Data struct {
-			BytesRead         int64 `bson:"bytesRead"`
-			TimeReadingMicros int64 `bson:"timeReadingMicros"`
-		} `bson:"data"`
-	} `bson:"storage"`
-	AppName  string `bson:"appName"`
-	Comments string `bson:"comments"`
+			BytesRead         int64 `bson:"bytesRead" json:"bytesRead"`
+			TimeReadingMicros int64 `bson:"timeReadingMicros" json:"timeReadingMicros"`
+		} `bson:"data" json:"data"`
+	} `bson:"storage" json:"storage"`
+	AppName  string `bson:"appName" json:"appName"`
+	Comments string `bson:"comments" json:"comments"`
 }

 func NewExampleQuery(doc SystemProfile) ExampleQuery {

@@ -15,7 +15,7 @@ Usage

 ::

-    pt-secure-data [<flags>] <command> [<args> ...]
+    pt-secure-collect [<flags>] <command> [<args> ...]

 By default, :program:`pt-secure-collect` will collect the output of:

@@ -123,11 +123,11 @@ $output = output(
 );
 $output = `cat archive.test.table_2`;
 is($output, <<EOF
-1, 2, 3, 4
-2, "\\N", 3, 4
-3, 2, 3, "\\\t"
-4, 2, 3, "\\\n"
-5, 2, 3, "Zapp \\"Brannigan"
+1,2,3,4
+2,\\N,3,4
+3,2,3,"\\\t"
+4,2,3,"\\\n"
+5,2,3,"Zapp \\"Brannigan"
 EOF
 , '--output-format=csv');
 `rm -f archive.test.table_2`;

t/pt-archiver/pt-2410.t (new file, 75 lines)
@@ -0,0 +1,75 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use charnames ':full';

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-archiver";

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('source');

if ( !$dbh ) {
   plan skip_all => 'Cannot connect to sandbox source';
}

my $output;
my $exit_status;
my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";

$sb->wipe_clean($dbh);
$sb->create_dbs($dbh, ['test']);

$sb->load_file('source', 't/pt-archiver/samples/pt-2410.sql');

($output, $exit_status) = full_output(
   sub { pt_archiver::main(
      qw(--where 1=1 --output-format=csv),
      '--source', "L=1,D=pt_2410,t=test,F=$cnf",
      '--file', '/tmp/pt-2410.csv') },
);

is(
   $exit_status,
   0,
   'pt-archiver completed'
);

$output = `cat /tmp/pt-2410.csv`;
like(
   $output,
   qr/1,\\N,"testing..."/,
   'NULL values stored correctly'
) or diag($output);

$dbh->do("load data local infile '/tmp/pt-2410.csv' into table pt_2410.test COLUMNS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"'");

$output = `/tmp/12345/use pt_2410 -N -e 'SELECT * FROM test'`;

like(
   $output,
   qr/1 NULL testing.../,
   'NULL values loaded correctly'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################
diag(`rm -f /tmp/pt-2410.csv`);
$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
exit;
t/pt-archiver/samples/pt-2410.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
CREATE DATABASE pt_2410;
USE pt_2410;

CREATE TABLE test(
  id int not null primary key auto_increment,
  column1 int default null,
  column2 varchar(50) not null);

INSERT INTO test VALUES (null,null,'testing...');
INSERT INTO test VALUES (null,null,'testing...');
@@ -137,7 +137,7 @@ set_delay();
 # We need to sleep, otherwise pt-osc can finish before replica is delayed
 sleep($max_lag);

-my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'engine=INNODB' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";
+my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'ADD COLUMN foo varchar(32)' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";

 $output = run_broken_job($args);

@@ -165,7 +165,7 @@ my @args = (qw(--execute --chunk-size=10 --history));

 ($output, $exit) = full_output(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-         '--alter', 'engine=INNODB', '--execute', "--resume=${job_id}",
+         '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
          '--chunk-index=f2'
    ) }
 );
@@ -186,7 +186,7 @@ like(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
          '--max-lag', $max_lag,
          '--resume', $job_id,
-         '--alter', 'engine=INNODB',
+         '--alter', 'ADD COLUMN foo varchar(32)',
          '--plugin', "$plugin/pt-1717.pm",
    ),
 },
@@ -208,8 +208,10 @@ ok(
    'All rows copied correctly'
 ) or diag("New table checksum: '${new_table_checksum}', original content checksum: '${old_table_checksum}'");

+diag(`/tmp/12345/use test -N -e "ALTER TABLE pt1717 DROP COLUMN foo"`);
+
 # Tests for chunk-index and chunk-index-columns options
-$args = "$source_dsn,D=test,t=pt1717 --alter engine=innodb --execute --history --chunk-size=10 --no-drop-new-table --no-drop-triggers --reverse-triggers --chunk-index=f2";
+$args = "$source_dsn,D=test,t=pt1717 --alter 'ADD COLUMN foo varchar(32)' --execute --history --chunk-size=10 --no-drop-new-table --no-drop-triggers --reverse-triggers --chunk-index=f2";

 set_delay();
 $output = run_broken_job($args);
@@ -220,7 +222,7 @@ $job_id = $1;

 ($output, $exit) = full_output(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-         '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+         '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
    ) }
 );

@@ -238,7 +240,7 @@ like(

 ($output, $exit) = full_output(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-         '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+         '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
          '--chunk-index=f1'
    ) }
 );
@@ -257,7 +259,7 @@ like(

 ($output, $exit) = full_output(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-         '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+         '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
          '--chunk-index=f2', '--chunk-index-columns=1'
    ) }
 );
@@ -288,7 +290,7 @@ is(
    $output + 0,
    3,
    'Triggers were not dropped'
-);
+) or diag($output);

 $output = `/tmp/12345/use -N -e "select count(*) from information_schema.triggers where TRIGGER_SCHEMA='test' AND EVENT_OBJECT_TABLE like '%pt1717%_new' AND trigger_name LIKE 'rt_%'"`;

@@ -300,7 +302,7 @@ is(

 ($output, $exit) = full_output(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-         '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+         '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
          '--chunk-size=4',
          '--chunk-index=f2'
    ) }
@@ -348,7 +350,7 @@ ok(

 ($output, $exit) = full_output(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-         '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+         '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
          '--chunk-size=4',
          '--chunk-index=f2'
    ) }
@@ -372,7 +374,7 @@ $output =~ /New table `test`.`([_]+pt1717_new)` not found, restart operation fro

 ($output, $exit) = full_output(
    sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-         '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+         '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
          '--chunk-size=4',
          '--chunk-index=f2'
    ) }

t/pt-online-schema-change/pt-2355.t (new file, 239 lines)
@@ -0,0 +1,239 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use threads;

use English qw(-no_match_vars);
use Test::More;

use Data::Dumper;
use PerconaTest;
use Sandbox;
use SqlModes;
use File::Temp qw/ tempdir tempfile /;

our $delay = 10;
my $max_lag = $delay / 2;
my $output;
my $exit;

my $tmp_file = File::Temp->new();
my $tmp_file_name = $tmp_file->filename;
unlink $tmp_file_name;

require "$trunk/bin/pt-online-schema-change";

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
if ($sb->is_cluster_mode) {
   plan skip_all => 'Not for PXC';
}

my $source_dbh = $sb->get_dbh_for('source');
my $replica_dbh1 = $sb->get_dbh_for('replica1');
my $replica_dbh2 = $sb->get_dbh_for('replica2');
my $source_dsn = 'h=127.0.0.1,P=12345,u=msandbox,p=msandbox';
my $replica_dsn1 = 'h=127.0.0.1,P=12346,u=msandbox,p=msandbox';
my $replica_dsn2 = 'h=127.0.0.1,P=12347,u=msandbox,p=msandbox';
my $sample = "t/pt-online-schema-change/samples";
my $plugin = "$trunk/$sample/plugins";

# We need sync_relay_log=1 to keep changes after replica restart
my $cnf = '/tmp/12347/my.sandbox.cnf';
diag(`cp $cnf $cnf.bak`);
diag(`echo "[mysqld]" > /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "sync_relay_log=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "sync_relay_log_info=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "relay_log_recovery=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "!include /tmp/12347/my.sandbox.2.cnf" >> $cnf`);
diag(`/tmp/12347/stop >/dev/null`);
sleep 1;
diag(`/tmp/12347/start >/dev/null`);

sub reset_query_cache {
   my @dbhs = @_;
   return if ($sandbox_version ge '8.0');
   foreach my $dbh (@dbhs) {
      $dbh->do('RESET QUERY CACHE');
   }
}

sub run_broken_job {
   my ($args) = @_;
   my ($fh, $filename) = tempfile();
   my $pid = fork();

   if (!$pid) {
      open(STDERR, '>', $filename);
      open(STDOUT, '>', $filename);
      exec("$trunk/bin/pt-online-schema-change $args");
   }

   sleep($max_lag + $max_lag/2);
   # stop replica 12347
   diag(`/tmp/12347/stop >/dev/null`);
   sleep 1;

   waitpid($pid, 0);
   my $output = do {
      local $/ = undef;
      <$fh>;
   };

   return $output;
}

sub set_delay {
   $sb->wait_for_replicas();

   diag("Setting replica delay to $delay seconds");
   diag(`/tmp/12345/use -N test -e "DROP TABLE IF EXISTS pt1717_back"`);

   $replica_dbh1->do("STOP ${replica_name}");
   $replica_dbh1->do("CHANGE ${source_change} TO ${source_name}_DELAY=$delay");
   $replica_dbh1->do("START ${replica_name}");

   # Run a full table scan query to ensure the replica is behind the source
   # There is no query cache in MySQL 8.0+
   reset_query_cache($source_dbh, $source_dbh);
   # Update one row so replica is delayed
   $source_dbh->do('UPDATE `test`.`pt1717` SET f2 = f2 + 1 LIMIT 1');
   $source_dbh->do('UPDATE `test`.`pt1717` SET f2 = f2 + 1 WHERE f1 = ""');

   # Creating copy of table pt1717, so we can compare data later
   diag(`/tmp/12345/use -N test -e "CREATE TABLE pt1717_back like pt1717"`);
   diag(`/tmp/12345/use -N test -e "INSERT INTO pt1717_back SELECT * FROM pt1717"`);
}

# 1) Set the replica delay to 0 just in case we are re-running the tests without restarting the sandbox.
# 2) Load sample data
# 3) Set the replica delay to 30 seconds to be able to see the 'waiting' message.
diag("Setting replica delay to 0 seconds");
$replica_dbh1->do("STOP ${replica_name}");
$source_dbh->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${replica_name}");
$replica_dbh1->do("START ${replica_name}");

diag('Loading test data');
$sb->load_file('source', "t/pt-online-schema-change/samples/pt-1717.sql");

# Should be greater than chunk-size and big enough, so pt-osc will wait for delay
my $num_rows = 5000;
my $chunk_size = 10;
diag("Loading $num_rows into the table. This might take some time.");
diag(`util/mysql_random_data_load --host=127.0.0.1 --port=12345 --user=msandbox --password=msandbox test pt1717 $num_rows`);

diag("Starting tests...");

set_delay();

# We need to sleep, otherwise pt-osc can finish before replica is delayed
sleep($max_lag);

my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'ADD INDEX idx1(f1)' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";

$output = run_broken_job($args);

like(
   $output,
   qr/`test`.`pt1717` was not altered/s,
   "pt-osc stopped with error as expected",
) or diag($output);

diag(`/tmp/12347/start >/dev/null`);
$sb->wait_for_replicas();

$output = `/tmp/12345/use -N -e "select job_id, upper_boundary from percona.pt_osc_history"`;
my ($job_id, $upper_boundary) = split(/\s+/, $output);

my $copied_rows = `/tmp/12345/use -N -e "select count(*) from test._pt1717_new"`;
chomp($copied_rows);

ok(
   $copied_rows eq $upper_boundary,
   'Upper chunk boundary stored correctly'
) or diag("Copied_rows: ${copied_rows}, upper boundary: ${upper_boundary}");

($output, $exit) = full_output(
   sub { pt_online_schema_change::main("$source_dsn,D=test,t=pt1717",
      "--execute", "--chunk-size=${chunk_size}", "--max-lag=${max_lag}",
      "--alter=ADD INDEX idx1(f1)",
      "--resume=${job_id}",
   ) }
);

is(
   $exit,
   0,
   'pt-osc works correctly with --resume'
) or diag($exit);

like(
   $output,
   qr/Successfully altered/,
   'Success message printed'
) or diag($output);

# Corrupting job record, so we can test error message
diag(`/tmp/12345/use -N -e "update percona.pt_osc_history set new_table_name=NULL where job_id=${job_id}"`);

($output, $exit) = full_output(
   sub { pt_online_schema_change::main("$source_dsn,D=test,t=pt1717",
      "--execute", "--chunk-size=${chunk_size}", "--max-lag=${max_lag}",
      "--alter=ADD INDEX idx1(f1)",
      "--resume=${job_id}",
   ) }
);

is(
   $exit,
   17,
   'pt-osc correctly fails with empty boundaries'
) or diag($exit);

like(
   $output,
   qr/Option --resume refers job \d+ with empty boundaries. Exiting./,
   'Correct error message printed'
) or diag($output);

unlike(
   $output,
   qr/Option --resume refers non-existing job ID: \d+. Exiting./,
   'Misleading error message not printed'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################
diag("Cleaning");
$replica_dbh2 = $sb->get_dbh_for('replica2');
diag("Setting replica delay to 0 seconds");
$replica_dbh1->do("STOP ${replica_name}");
$replica_dbh2->do("STOP ${replica_name}");
$source_dbh->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${replica_name}");
$replica_dbh2->do("RESET ${replica_name}");
$replica_dbh1->do("START ${replica_name}");
$replica_dbh2->do("START ${replica_name}");

diag(`mv $cnf.bak $cnf`);

diag(`/tmp/12347/stop >/dev/null`);
diag(`/tmp/12347/start >/dev/null`);

diag("Dropping test database");
$source_dbh->do("DROP DATABASE IF EXISTS test");
$sb->wait_for_replicas();

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
t/pt-online-schema-change/pt-2407.t (new file, 71 lines)
@@ -0,0 +1,71 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-online-schema-change";
require VersionParser;

use Data::Dumper;

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $source_dbh = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');

if ( !$source_dbh ) {
   plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$replica_dbh ) {
   plan skip_all => 'Cannot connect to sandbox replica';
}

my @args = qw(--set-vars innodb_lock_wait_timeout=3);
my $output = "";
my $dsn = "h=127.1,P=12345,u=msandbox,p=msandbox";
my $exit = 0;
my $sample = "t/pt-online-schema-change/samples";

$sb->load_file('source', "$sample/pt-2407.sql");

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$dsn,D=pt_2407,t=t1",
      '--alter', 'alter table t1 ADD COLUMN payout_group_id VARCHAR(255) DEFAULT NULL, ALGORITHM=INSTANT;', '--execute') }
);

is(
   $exit,
   11,
   'Return code non-zero for failed operation'
) or diag($exit);

like(
   $output,
   qr/You have an error in your SQL syntax/,
   'Job failed due to SQL syntax error'
) or diag($output);

like(
   $output,
   qr/Error altering new table/,
   'Error altering new table message printed'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
#
done_testing;
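Aside (an editor's sketch, not repository code): pt-2407.t deliberately feeds a complete ALTER statement to --alter, which expects only the clause after "ALTER TABLE <tbl>". The pt-online-schema-change hunk earlier shows the tool building the SQL by plain concatenation (my $sql = "ALTER TABLE $new_tbl->{name} $alter"), so the doubled keywords produce the syntax error the test asserts on. The table name below is hypothetical:

use strict;
use warnings;

# --alter value from pt-2407.t: a full statement, which is the user error.
my $alter   = "alter table t1 ADD COLUMN payout_group_id VARCHAR(255) DEFAULT NULL, ALGORITHM=INSTANT;";
my $new_tbl = { name => '`pt_2407`.`_t1_new`' };   # illustrative new-table name

# Same concatenation as in pt-online-schema-change's main():
my $sql = "ALTER TABLE $new_tbl->{name} $alter";
print "$sql\n";   # ALTER TABLE `pt_2407`.`_t1_new` alter table t1 ... -> SQL syntax error
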
t/pt-online-schema-change/pt-2422.t (new file, 120 lines)
@@ -0,0 +1,120 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-online-schema-change";
require VersionParser;

use Data::Dumper;

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $source_dbh = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');

if ( !$source_dbh ) {
   plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$replica_dbh ) {
   plan skip_all => 'Cannot connect to sandbox replica';
}

my @args = qw(--set-vars innodb_lock_wait_timeout=3);
my $output = "";
my $dsn = "h=127.1,P=12345,u=msandbox,p=msandbox";
my $exit = 0;
my $sample = "t/pt-online-schema-change/samples";

$sb->load_file('source', "$sample/basic_no_fks_innodb.sql");
$source_dbh->do('CREATE TABLE pt_osc.pt_2422 LIKE pt_osc.t');
$source_dbh->do('INSERT INTO pt_osc.pt_2422 SELECT * FROM pt_osc.t');

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$dsn,D=pt_osc,t=t",
      '--alter', 'engine=innodb', '--execute', '--history') }
);

is(
   $exit,
   0,
   'basic test with option --history finished OK'
) or diag($output);

like(
   $output,
   qr/Job \d started/,
   'Job id printed in the beginning of the tool output'
);

like(
   $output,
   qr/Job \d finished successfully/,
   'Job id printed for successful copy'
);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=1"`;

like(
   $output,
   qr/_t_new/,
   'Correct new table name inserted'
) or diag($output);

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$dsn,D=pt_osc,t=pt_2422",
      '--alter', 'engine=innodb', '--execute', '--history') }
);

is(
   $exit,
   0,
   'basic test with second table and option --history finished OK'
) or diag($output);

like(
   $output,
   qr/Job \d started/,
   'Job id printed in the beginning of the tool output for the second table'
);

like(
   $output,
   qr/Job \d finished successfully/,
   'Job id printed for successful copy of the second table'
);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=1"`;

like(
   $output,
   qr/_t_new/,
   'New table name for previous job was not updated'
) or diag($output);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=2"`;

like(
   $output,
   qr/_pt_2422_new/,
   'Correct new table name inserted for the second table'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
#
done_testing;
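Aside (an editor's sketch, not repository code): pt-2422.t exercises the WHERE-clause fix shown earlier; without it, finishing job 2 would overwrite new_table_name for every recorded job in the history table. A minimal DBI sketch of the corrected statement, with hypothetical connection details matching the 12345 sandbox used in the tests:

use strict;
use warnings;
use DBI;

# Hypothetical sandbox connection; credentials mirror the test sandbox.
my $dbh = DBI->connect('DBI:mysql:host=127.0.0.1;port=12345',
                       'msandbox', 'msandbox', { RaiseError => 1 });

my ($hist_table, $new_table_name, $job_id) = ('percona.pt_osc_history', '_pt_2422_new', 2);

# The WHERE clause is the fix: only the current job's row is touched.
my $sth = $dbh->prepare("UPDATE $hist_table SET new_table_name = ? WHERE job_id = ?");
$sth->execute($new_table_name, $job_id);
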
t/pt-online-schema-change/samples/pt-2407.sql (new file, 12 lines)
@@ -0,0 +1,12 @@
CREATE DATABASE pt_2407;

USE pt_2407;

CREATE TABLE t1 (
   c1 int NOT NULL,
   c2 varchar(100) NOT NULL,
   PRIMARY KEY (c1),
   KEY idx (c2)
) ENGINE=InnoDB;

INSERT INTO t1 VALUES(1,1),(2,2),(3,3),(4,4),(5,5);