Compare commits

..

2 Commits

Author           SHA1        Message                                     Date
Carlos Salguero  3ebe939ce7  Fixed small error displaying the advices    2018-10-26 16:08:59 -03:00
Carlos Salguero  b33e63cdf2  Replaced HTTP::Micro by HTTP::Tiny          2018-10-26 16:02:34 -03:00
78 changed files with 23819 additions and 11712 deletions

View File

@@ -3,7 +3,6 @@ language: go
go:
- 1.9.x
- 1.10.x
- 1.12.x
services:
- docker
@@ -32,7 +31,7 @@ before_script:
- dep ensure
script:
- go test -timeout 20m ./src/...
- go test -timeout 1m ./src/...
allow_failures:
- tip

View File

@@ -1,19 +1,5 @@
Changelog for Percona Toolkit
* Fixed bug PT-1633: Fix incorrect parsing of a variable with number + K,M,G,T (Thanks Dieter Adriaenssens)
v3.0.13 released 2018-12-28
* Fixed bug PT-1673: Fix pt-show-grants for MariaDB 10+ (thanks Tim Birkett)
* Fixed bug PT-1638: pt-online-schema-change not working with MariaDB 10.x
* Improvement PT-1637: Added --fail-on-stopped-replication param to pt-table-checksum
* Fixed bug PT-1616: pt-table-checksum fails to --resume on certain binary strings
* Fixed bug PT-1573: pt-query-digest log_timestamps = SYSTEM # No events processed
* Improvement PT-1340: pt-stalk should not call mysqladmin debug by default
* Fixed bug PT-157: Specifying the index to use for pt-archiver ignores --primary-key-only
v3.0.12 released 2018-09-13
* Fixed bug PT-1611: pt-archiver fails with UTF-8 chars
* Fixed bug PT-1574: pt-online-schema-change fails on UK and NULLs
* Fixed bug PT-1572: Better usage of ENUM fields in keys in NibbleIterator

Gopkg.lock (generated), 282 changed lines
View File

@@ -2,206 +2,126 @@
[[projects]]
digest = "1:b856d8248663c39265a764561c1a1a149783f6cc815feb54a1f3a591b91f6eca"
name = "github.com/Masterminds/semver"
packages = ["."]
pruneopts = ""
revision = "c7af12943936e8c39859482e61f0574c2fd7fc75"
version = "v1.4.2"
[[projects]]
digest = "1:f82b8ac36058904227087141017bb82f4b0fc58272990a4cdae3e2d6d222644e"
name = "github.com/StackExchange/wmi"
packages = ["."]
pruneopts = ""
revision = "5d049714c4a64225c3c79a7cf7d02f7fb5b96338"
version = "1.0.0"
[[projects]]
digest = "1:15d017551627c8bb091bde628215b2861bed128855343fdd570c62d08871f6e1"
name = "github.com/alecthomas/kingpin"
packages = ["."]
pruneopts = ""
revision = "947dcec5ba9c011838740e680966fd7087a71d0d"
version = "v2.2.6"
[[projects]]
branch = "master"
digest = "1:a74730e052a45a3fab1d310fdef2ec17ae3d6af16228421e238320846f2aaec8"
name = "github.com/alecthomas/template"
packages = [
".",
"parse",
"parse"
]
pruneopts = ""
revision = "a0175ee3bccc567396460bf5acd36800cb10c49c"
[[projects]]
branch = "master"
digest = "1:8483994d21404c8a1d489f6be756e25bfccd3b45d65821f25695577791a08e68"
name = "github.com/alecthomas/units"
packages = ["."]
pruneopts = ""
revision = "2efee857e7cfd4f3d0138cc3cbb1b4966962b93a"
[[projects]]
digest = "1:03edf882162b807cdf1bc558c66226167fa2f8eb44359eac2eeb3794a91cb168"
branch = "master"
name = "github.com/bradfitz/slice"
packages = ["."]
revision = "d9036e2120b5ddfa53f3ebccd618c4af275f47da"
[[projects]]
name = "github.com/go-ini/ini"
packages = ["."]
pruneopts = ""
revision = "c85607071cf08ca1adaf48319cd1aa322e81d8c1"
version = "v1.42.0"
revision = "ace140f73450505f33e8b8418216792275ae82a7"
version = "v1.35.0"
[[projects]]
digest = "1:fd2ee29b7807f198e72dbd6371267b34d05aa83151c8c81b2ade14854e50f4ee"
name = "github.com/go-logr/logr"
packages = ["."]
pruneopts = ""
revision = "d18fcbf02861580d05a1f23601145b272c4e7b4b"
version = "v0.2.0"
[[projects]]
digest = "1:b6581f9180e0f2d5549280d71819ab951db9d511478c87daca95669589d505c0"
name = "github.com/go-ole/go-ole"
packages = [
".",
"oleutil",
"oleutil"
]
pruneopts = ""
revision = "97b6244175ae18ea6eef668034fd6565847501c9"
version = "v1.2.4"
revision = "a41e3c4b706f6ae8dfbff342b06e40fa4d2d0506"
version = "v1.2.1"
[[projects]]
digest = "1:d69d2ba23955582a64e367ff2b0808cdbd048458c178cea48f11ab8c40bd7aea"
name = "github.com/gogo/protobuf"
packages = [
"proto",
"sortkeys",
]
pruneopts = ""
revision = "5628607bb4c51c3157aacc3a50f0ab707582b805"
version = "v1.3.1"
[[projects]]
digest = "1:530233672f656641b365f8efb38ed9fba80e420baff2ce87633813ab3755ed6d"
name = "github.com/golang/mock"
packages = ["gomock"]
pruneopts = ""
revision = "51421b967af1f557f93a59e0057aaf15ca02e29c"
version = "v1.2.0"
[[projects]]
digest = "1:16ecf9e89b8b1310d9566a53484c31c5241bb47c32162eba780b46c0dfb58fef"
name = "github.com/google/gofuzz"
packages = ["."]
pruneopts = ""
revision = "db92cf7ae75e4a7a28abc005addab2b394362888"
version = "v1.1.0"
revision = "c34cdb4725f4c3844d095133c6e40e448b86589b"
version = "v1.1.1"
[[projects]]
branch = "master"
digest = "1:b759103c9b4135568253c17d2866064cde398e93764b611caabf5aa8e3059685"
name = "github.com/hashicorp/go-version"
packages = ["."]
pruneopts = ""
revision = "d40cf49b3a77bba84a7afdbd7f1dc295d114efb1"
revision = "23480c0665776210b5fbbac6eaaee40e3e6a96b7"
[[projects]]
branch = "master"
digest = "1:f81c8d7354cc0c6340f2f7a48724ee6c2b3db3e918ecd441c985b4d2d97dd3e7"
name = "github.com/howeyc/gopass"
packages = ["."]
pruneopts = ""
revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8"
[[projects]]
digest = "1:0f51cee70b0d254dbc93c22666ea2abf211af81c1701a96d04e2284b408621db"
name = "github.com/konsorten/go-windows-terminal-sequences"
packages = ["."]
pruneopts = ""
revision = "f55edac94c9bbba5d6182a4be46d86a2c9b5b50e"
version = "v1.0.2"
[[projects]]
digest = "1:3108ec0946181c60040ff51b811908f89d03e521e2b4ade5ef5c65b3c0e911ae"
name = "github.com/kr/pretty"
packages = ["."]
pruneopts = ""
revision = "73f6ac0b30a98e433b289500d779f50c1a6f0712"
version = "v0.1.0"
[[projects]]
digest = "1:11b056b4421396ab14e384ab8ab8c2079b03f1e51aa5eb4d9b81f9e0d1aa8fbf"
name = "github.com/kr/text"
packages = ["."]
pruneopts = ""
revision = "e2ffdb16a802fe2bb95e2e35ff34f0e53aeef34f"
version = "v0.1.0"
[[projects]]
digest = "1:0093a7c66d5b9e0cdaf4be5c20e0a9b889d1d839148eeed1d587e99b4cfd90ff"
name = "github.com/mattn/go-shellwords"
packages = ["."]
pruneopts = ""
revision = "a72fbe27a1b0ed0df2f02754945044ce1456608b"
version = "v1.0.5"
revision = "02e3cf038dcea8290e44424da473dd12be796a8a"
version = "v1.0.3"
[[projects]]
digest = "1:a067513044dc491395a58f56f39cedddb5ad35789b832b570c283a64d712f81b"
name = "github.com/montanaflynn/stats"
packages = ["."]
pruneopts = ""
revision = "eeaced052adbcfeea372c749c281099ed7fdaa38"
version = "0.2.0"
[[projects]]
branch = "master"
digest = "1:020f67c818cb9c3fdc77d92c5744fb2d5b90930280cc43311ba43c6459fd0b98"
name = "github.com/pborman/getopt"
packages = [
".",
"v2",
"v2"
]
pruneopts = ""
revision = "fd6d657c3083960b8d604310c34a621ec24bdc6a"
revision = "7148bc3a4c3008adfcab60cbebfd0576018f330b"
[[projects]]
branch = "master"
digest = "1:7a840dbacabd648e5b511010dea5da9eed99030dd185b3c7c7195fdadb3051a8"
name = "github.com/percona/go-mysql"
packages = ["query"]
pruneopts = ""
revision = "f5cfaf6a5e55b754b7b106f4488e1bc24cb8c2d6"
revision = "82ed67a1d0f1779cd60a025c54e0827da0c0838b"
[[projects]]
digest = "1:16b4510ba61ab0bb7a4e694ea6396a7b2879f5fabb21e93066e182691f790173"
name = "github.com/percona/pmgo"
packages = [
".",
"pmgomock",
"pmgomock"
]
pruneopts = ""
revision = "497d06e28f910fbe26d5d60f59d36284a6901c6f"
version = "0.5.2"
[[projects]]
digest = "1:1d7e1867c49a6dd9856598ef7c3123604ea3daabf5b83f303ff457bcbc410b1d"
name = "github.com/pkg/errors"
packages = ["."]
pruneopts = ""
revision = "ba968bfe8b2f7e042a574c888954fccecfa385b4"
version = "v0.8.1"
revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
version = "v0.8.0"
[[projects]]
digest = "1:7f569d906bdd20d906b606415b7d794f798f91a62fcfb6a4daa6d50690fb7a3f"
name = "github.com/satori/go.uuid"
packages = ["."]
pruneopts = ""
revision = "f58768cc1a7a7e77a3bd49e98cdd21419399b6a3"
version = "v1.2.0"
[[projects]]
digest = "1:d77a85cf43b70ae61fa2543d402d782b40dca0f5f41413839b5f916782b0fab9"
name = "github.com/shirou/gopsutil"
packages = [
"cpu",
@@ -209,97 +129,52 @@
"internal/common",
"mem",
"net",
"process",
"process"
]
pruneopts = ""
revision = "6c6abd6d1666d6b27f1c261e0f850441ba22aa3a"
version = "v2.19.02"
revision = "fc04d2dd9a512906a2604242b35275179e250eda"
version = "v2.18.03"
[[projects]]
branch = "master"
digest = "1:99c6a6dab47067c9b898e8c8b13d130c6ab4ffbcc4b7cc6236c2cd0b1e344f5b"
name = "github.com/shirou/w32"
packages = ["."]
pruneopts = ""
revision = "bb4de0191aa41b5507caa14b0650cdbddcd9280b"
[[projects]]
digest = "1:b73fe282e350b3ef2c71d8ff08e929e0b9670b1bb5b7fde1d3c1b4cd6e6dc8b1"
name = "github.com/sirupsen/logrus"
packages = ["."]
pruneopts = ""
revision = "dae0fa8d5b0c810a8ab733fbd5510c7cae84eca4"
version = "v1.4.0"
revision = "d682213848ed68c0a260ca37d6dd5ace8423f5ba"
version = "v1.0.4"
[[projects]]
branch = "master"
name = "go4.org"
packages = ["reflectutil"]
revision = "9599cf28b011184741f249bd9f9330756b506cbc"
[[projects]]
branch = "master"
digest = "1:36ef1d8645934b1744cc7d8726e00d3dd9d8d84c18617bf7367a3a6d532f3370"
name = "golang.org/x/crypto"
packages = ["ssh/terminal"]
pruneopts = ""
revision = "a5d413f7728c81fb97d96a2b722368945f651e78"
revision = "d6449816ce06963d9d136eee5a56fca5b0616e7e"
[[projects]]
branch = "master"
digest = "1:adcb9e84ce154ef1d45851b57c40f8a211db3e36373a65b7c4f10c79b7428718"
name = "golang.org/x/net"
packages = [
"context",
"http/httpguts",
"http2",
"http2/hpack",
"idna",
]
pruneopts = ""
revision = "74de082e2cca95839e88aa0aeee5aadf6ce7710f"
packages = ["context"]
revision = "d41e8174641f662c5a2d1c7a5f9e828788eb8706"
[[projects]]
branch = "master"
digest = "1:1b0de777d8ddd63356d5a4d76799ea8f47e811aa9dda85ddc72b2a061c799cc9"
name = "golang.org/x/sys"
packages = [
"unix",
"windows",
"windows"
]
pruneopts = ""
revision = "9eb1bfa1ce65ae8a6ff3114b0aaf9a41a6cf3560"
[[projects]]
digest = "1:fccda34e4c58111b1908d8d69bf8d57c41c8e2542bc18ec8cd38c4fa21057f71"
name = "golang.org/x/text"
packages = [
"collate",
"collate/build",
"internal/colltab",
"internal/gen",
"internal/language",
"internal/language/compact",
"internal/tag",
"internal/triegen",
"internal/ucd",
"language",
"secure/bidirule",
"transform",
"unicode/bidi",
"unicode/cldr",
"unicode/norm",
"unicode/rangetable",
]
pruneopts = ""
revision = "23ae387dee1f90d29a23c0e87ee0b46038fbed0e"
version = "v0.3.3"
[[projects]]
digest = "1:75fb3fcfc73a8c723efde7777b40e8e8ff9babf30d8c56160d01beffea8a95a6"
name = "gopkg.in/inf.v0"
packages = ["."]
pruneopts = ""
revision = "d2d2541c53f18d2a059457998ce2876cc8e67cbf"
version = "v0.9.1"
revision = "3ccc7e5779793fd54564baf60c51bf017955e0ba"
[[projects]]
branch = "v2"
digest = "1:f54ba71a035aac92ced3e902d2bff3734a15d1891daff73ec0f90ef236750139"
name = "gopkg.in/mgo.v2"
packages = [
".",
@@ -307,92 +182,19 @@
"dbtest",
"internal/json",
"internal/sasl",
"internal/scram",
"internal/scram"
]
pruneopts = ""
revision = "9856a29383ce1c59f308dd1cf0363a79b5bef6b5"
revision = "3f83fa5005286a7fe593b055f0d7771a7dce4655"
[[projects]]
branch = "v2"
digest = "1:61a650a53e5e865a91ae9581f02990a4b6e3afcb8d280f19b1e67a3c284944e6"
name = "gopkg.in/tomb.v2"
packages = ["."]
pruneopts = ""
revision = "d5d1b5820637886def9eef33e03a27a9f166942c"
[[projects]]
digest = "1:a249e341b9bf261a982ab262c69f08223e839302d0a21cfe6e00f2ef2e8695a2"
name = "k8s.io/api"
packages = ["core/v1"]
pruneopts = ""
revision = "f822fed505d4c9dd4eb2c5f4ca2f4c49c19ea394"
version = "v0.18.6"
[[projects]]
digest = "1:74eeecf1188777314a92348555adcb977912d530269130143daf7fc0e80bb512"
name = "k8s.io/apimachinery"
packages = [
"pkg/api/resource",
"pkg/apis/meta/v1",
"pkg/conversion",
"pkg/conversion/queryparams",
"pkg/fields",
"pkg/labels",
"pkg/runtime",
"pkg/runtime/schema",
"pkg/selection",
"pkg/types",
"pkg/util/errors",
"pkg/util/intstr",
"pkg/util/json",
"pkg/util/naming",
"pkg/util/net",
"pkg/util/runtime",
"pkg/util/sets",
"pkg/util/validation",
"pkg/util/validation/field",
"pkg/watch",
"third_party/forked/golang/reflect",
]
pruneopts = ""
revision = "fbe88689c3c2735e949f67884a4f58cb99379159"
version = "v0.17.9"
[[projects]]
digest = "1:5ad0a3bf1b13f9b8bd99f4079c635cb813d87b70db65b98fe5503762e1d39735"
name = "k8s.io/klog"
packages = ["."]
pruneopts = ""
revision = "b5c3182dac44f851522e32c97c86ac32755c296d"
version = "v2.3.0"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
"github.com/Masterminds/semver",
"github.com/alecthomas/kingpin",
"github.com/go-ini/ini",
"github.com/golang/mock/gomock",
"github.com/hashicorp/go-version",
"github.com/howeyc/gopass",
"github.com/kr/pretty",
"github.com/mattn/go-shellwords",
"github.com/montanaflynn/stats",
"github.com/pborman/getopt",
"github.com/pborman/getopt/v2",
"github.com/percona/go-mysql/query",
"github.com/percona/pmgo",
"github.com/percona/pmgo/pmgomock",
"github.com/pkg/errors",
"github.com/satori/go.uuid",
"github.com/shirou/gopsutil/process",
"github.com/sirupsen/logrus",
"golang.org/x/crypto/ssh/terminal",
"gopkg.in/mgo.v2",
"gopkg.in/mgo.v2/bson",
"gopkg.in/mgo.v2/dbtest",
"k8s.io/api/core/v1",
]
inputs-digest = "6e2c1fd110c892297e79498479e87f7d4c63755a5a1d63d13c82eb42a7e5fbf2"
solver-name = "gps-cdcl"
solver-version = 1

View File

@@ -24,6 +24,10 @@
name = "github.com/Masterminds/semver"
version = "1.4.0"
[[constraint]]
branch = "master"
name = "github.com/bradfitz/slice"
[[constraint]]
name = "github.com/golang/mock"
version = "1.0.0"

View File

@@ -2,7 +2,7 @@ use ExtUtils::MakeMaker;
WriteMakefile(
NAME => 'percona-toolkit',
VERSION => '3.0.13',
VERSION => '3.0.12',
EXE_FILES => [ <bin/*> ],
MAN1PODS => {
'docs/percona-toolkit.pod' => 'blib/man1/percona-toolkit.1p',

View File

@@ -1,5 +1,4 @@
# Percona Toolkit
[![CLA assistant](https://cla-assistant.percona.com/readme/badge/percona/percona-toolkit)](https://cla-assistant.percona.com/percona/percona-toolkit)
*Percona Toolkit* is a collection of advanced command-line tools used by
[Percona](http://www.percona.com/) support staff to perform a variety of

View File

@@ -1287,7 +1287,7 @@ reasonably new version of Perl.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-align>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -1359,6 +1359,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-align 3.0.13
pt-align 3.0.12
=cut

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1576,7 +1576,7 @@ installed in any reasonably new version of Perl.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-fifo-split>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -1648,6 +1648,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-fifo-split 3.0.13
pt-fifo-split 3.0.12
=cut

File diff suppressed because it is too large

View File

@@ -2168,7 +2168,7 @@ installed in any reasonably new version of Perl.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-fingerprint>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -2239,6 +2239,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-fingerprint 3.0.13
pt-fingerprint 3.0.12
=cut

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1055,7 +1055,7 @@ This tool requires the Bourne shell (F</bin/sh>).
For a list of known bugs, see L<http://www.percona.com/bugs/pt-ioprofile>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -1127,7 +1127,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-ioprofile 3.0.13
pt-ioprofile 3.0.12
=cut

File diff suppressed because it is too large

View File

@@ -732,7 +732,7 @@ This tool requires the Bourne shell (F</bin/sh>) and the seq program.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-mext>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -804,7 +804,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-mext 3.0.13
pt-mext 3.0.12
=cut

View File

@@ -2358,9 +2358,10 @@ report_mysql_summary () {
section_percona_server_features "$dir/mysql-variables"
section "Percona XtraDB Cluster"
local has_wsrep=$($CMD_MYSQL $EXT_ARGV -ss -e 'show session variables like "%wsrep_on%";' | cut -f2 | grep -i "on")
local has_wsrep="$(get_var "wsrep_on" "$dir/mysql-variables")"
if [ -n "${has_wsrep:-""}" ]; then
if [ "${has_wsrep:-""}" = "ON" ]; then
local wsrep_on="$(feat_on "$dir/mysql-variables" "wsrep_on")"
if [ "${wsrep_on:-""}" = "Enabled" ]; then
section_percona_xtradb_cluster "$dir/mysql-variables" "$dir/mysql-status"
else
name_val "wsrep_on" "OFF"
@@ -3217,7 +3218,7 @@ On BSD systems, it may require a mounted procfs.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-mysql-summary>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -3289,7 +3290,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-mysql-summary 3.0.13
pt-mysql-summary 3.0.12
=cut

File diff suppressed because it is too large

View File

@@ -825,7 +825,7 @@ on the command line.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-pmp>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -897,7 +897,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-pmp 3.0.13
pt-pmp 3.0.12
=cut

File diff suppressed because it is too large

View File

@@ -84,7 +84,7 @@ sub new {
rules => [], # desc of rules for --help
mutex => [], # rule: opts are mutually exclusive
atleast1 => [], # rule: at least one opt is required
disables => {}, # rule: opt disables other opts
disables => {}, # rule: opt disables other opts
defaults_to => {}, # rule: opt defaults to value of other opt
DSNParser => undef,
default_files => [
@@ -247,7 +247,7 @@ sub _pod_to_specs {
}
push @specs, {
spec => $self->{parse_attributes}->($self, $option, \%attribs),
spec => $self->{parse_attributes}->($self, $option, \%attribs),
desc => $para
. (defined $attribs{default} ? " (default $attribs{default})" : ''),
group => ($attribs{'group'} ? $attribs{'group'} : 'default'),
@@ -338,7 +338,7 @@ sub _parse_specs {
$self->{opts}->{$long} = $opt;
}
else { # It's an option rule, not a spec.
PTDEBUG && _d('Parsing rule:', $opt);
PTDEBUG && _d('Parsing rule:', $opt);
push @{$self->{rules}}, $opt;
my @participants = $self->_get_participants($opt);
my $rule_ok = 0;
@@ -383,7 +383,7 @@ sub _parse_specs {
PTDEBUG && _d('Option', $long, 'disables', @participants);
}
return;
return;
}
sub _get_participants {
@@ -470,7 +470,7 @@ sub _set_option {
}
sub get_opts {
my ( $self ) = @_;
my ( $self ) = @_;
foreach my $long ( keys %{$self->{opts}} ) {
$self->{opts}->{$long}->{got} = 0;
@@ -601,7 +601,7 @@ sub _check_opts {
else {
$err = join(', ',
map { "--$self->{opts}->{$_}->{long}" }
grep { $_ }
grep { $_ }
@restricted_opts[0..scalar(@restricted_opts) - 2]
)
. ' or --'.$self->{opts}->{$restricted_opts[-1]}->{long};
@@ -611,7 +611,7 @@ sub _check_opts {
}
}
elsif ( $opt->{is_required} ) {
elsif ( $opt->{is_required} ) {
$self->save_error("Required option --$long must be specified");
}
@@ -995,7 +995,7 @@ sub clone {
$clone{$scalar} = $self->{$scalar};
}
return bless \%clone;
return bless \%clone;
}
sub _parse_size {
@@ -1361,7 +1361,7 @@ sub get_dbh {
my $dbh;
my $tries = 2;
while ( !$dbh && $tries-- ) {
PTDEBUG && _d($cxn_string, ' ', $user, ' ', $pass,
PTDEBUG && _d($cxn_string, ' ', $user, ' ', $pass,
join(', ', map { "$_=>$defaults->{$_}" } keys %$defaults ));
$dbh = eval { DBI->connect($cxn_string, $user, $pass, $defaults) };
@@ -1537,7 +1537,7 @@ sub set_vars {
}
}
return;
return;
}
sub _d {
@@ -1624,7 +1624,7 @@ sub daemonize {
close STDERR;
open STDERR, ">&STDOUT"
or die "Cannot dupe STDERR to STDOUT: $OS_ERROR";
or die "Cannot dupe STDERR to STDOUT: $OS_ERROR";
}
else {
if ( -t STDOUT ) {
@@ -1777,13 +1777,13 @@ sub cmp {
$v1 =~ s/[^\d\.]//;
$v2 =~ s/[^\d\.]//;
my @a = ( $v1 =~ /(\d+)\.?/g );
my @b = ( $v2 =~ /(\d+)\.?/g );
my @a = ( $v1 =~ /(\d+)\.?/g );
my @b = ( $v2 =~ /(\d+)\.?/g );
foreach my $n1 (@a) {
$n1 += 0;
if (!@b) {
return 1;
}
}
my $n2 = shift @b;
$n2 += 0;
if ($n1 == $n2) {
@@ -1791,8 +1791,8 @@ sub cmp {
}
else {
return $n1 <=> $n2;
}
}
}
}
return @b ? -1 : 0;
}
@@ -1852,7 +1852,7 @@ sub main {
# ########################################################################
# Parse --only and --ignore users.
# ########################################################################
# ########################################################################
my @all_hosts;
if ( my $users = $o->get('only') ) {
my @users = map {
@@ -1902,7 +1902,7 @@ sub main {
{ AutoCommit => 1, });
my ( $version, $ts ) = $dbh->selectrow_array("SELECT VERSION(), NOW()");
print join("\n",
"-- Grants dumped by pt-show-grants",
"-- Dumped from server " . ($dbh->{mysql_hostinfo} || '')
@@ -1910,10 +1910,10 @@ sub main {
), "\n" if $o->get('header');
# MySQL 8 roles must be excluded from the regular users list.
# Roles can be identified because the user password is expired, the authentication
# Roles can be identified because the user password is expired, the authentication
# string is empty and the account is locked
my $mysql8_where = '';
if ($version !~ "-MariaDB" && VersionCompare::cmp($version, '8.0.0') >= 0) {
if (VersionCompare::cmp($version, '8.0.0') >= 0) {
$mysql8_where = ' WHERE NOT ( `account_locked`="Y" AND ' .
' `password_expired`="Y" AND ' .
' `authentication_string`="" ) ';
@@ -1948,7 +1948,7 @@ sub main {
}
print "-- End of roles listing\n";
}
USER:
foreach my $u ( @$users ) {
my $user_host = "'$u->{User}'\@'$u->{Host}'";
@@ -2053,7 +2053,7 @@ sub main {
}
# The WITH GRANT OPTION must be revoked separately.
if ( $grant =~ m/WITH GRANT OPTION/ ) {
if ( $grant =~ m/WITH GRANT OPTION/ ) {
push @result, "REVOKE GRANT OPTION ON *.* FROM $user" if $user;
}
@@ -2073,7 +2073,7 @@ sub main {
"DELETE FROM `mysql`.`user` WHERE `User`='$u->{User}' AND `Host`='$u->{Host}';",
), "\n";
}
print join( "\n", "-- Grants for $user_host",
map {"$_;"} @grants ), "\n";
@@ -2126,7 +2126,7 @@ __EOQ
$query .= " AND to_user IN ($user_names)";
}
PTDEBUG && _d("Getting roles");
PTDEBUG && _d($query);
PTDEBUG && _d($query);
my $roles;
eval { $roles = $dbh->selectall_arrayref($query, { Slice => {} }) };
if ($EVAL_ERROR) {
@@ -2591,6 +2591,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-show-grants 3.0.13
pt-show-grants 3.0.12
=cut

View File

@@ -1173,7 +1173,7 @@ they will be fetched from the Internet if curl is available.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-sift>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -1245,7 +1245,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-sift 3.0.13
pt-sift 3.0.12
=cut

File diff suppressed because it is too large

View File

@@ -4410,7 +4410,7 @@ installed in any reasonably new version of Perl.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-slave-find>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -4482,6 +4482,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-slave-find 3.0.13
pt-slave-find 3.0.12
=cut

File diff suppressed because it is too large

View File

@@ -2306,7 +2306,7 @@ This tool requires Bash v3 or newer. Certain options require other programs:
For a list of known bugs, see L<http://www.percona.com/bugs/pt-stalk>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -2379,7 +2379,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-stalk 3.0.13
pt-stalk 3.0.12
=cut

View File

@@ -2651,7 +2651,7 @@ This tool requires the Bourne shell (F</bin/sh>).
For a list of known bugs, see L<http://www.percona.com/bugs/pt-summary>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -2723,7 +2723,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-summary 3.0.13
pt-summary 3.0.12
=cut

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -8416,7 +8416,7 @@ installed in any reasonably new version of Perl.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-table-usage>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -8487,6 +8487,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-table-usage 3.0.13
pt-table-usage 3.0.12
=cut

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -3209,7 +3209,7 @@ installed in any reasonably new version of Perl.
For a list of known bugs, see L<http://www.percona.com/bugs/pt-visual-explain>.
Please report bugs at L<https://jira.percona.com/projects/PT>.
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
Include the following information in your bug report:
=over
@@ -3281,6 +3281,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
pt-visual-explain 3.0.13
pt-visual-explain 3.0.12
=cut

View File

@@ -1,16 +1,3 @@
percona-toolkit (3.0.13-1) unstable; urgency=low
* Fixed bug PT-1673: Fix pt-show-grants for MariaDB 10+ (thanks Tim Birkett)
* Fixed bug PT-1638: pt-online-schema-change not working with MariaDB 10.x
* Improvement PT-1637: Added --fail-on-stopped-replication param to pt-table-checksum
* Fixed bug PT-1616: pt-table-checksum fails to --resume on certain binary strings
* Fixed bug PT-1573: pt-query-digest log_timestamps = SYSTEM # No events processed
* Improvement PT-1340: pt-stalk should not call mysqladmin debug by default
* Fixed bug PT-1114: pt-table-checksum fails when table is empty
* Fixed bug PT-157: Specifying the index to use for pt-archiver ignores --primary-key-only
-- Percona Toolkit Developers <toolkit-dev@percona.com> Fri, 28 Dec 2018 08:06:00 +0000
percona-toolkit (3.0.12-1) unstable; urgency=low
* Fixed bug PT-1611: pt-archiver fails with UTF-8 chars

View File

@@ -1,6 +1,3 @@
%undefine _missing_build_ids_terminate_build
%define debug_package %{nil}
Name: percona-toolkit
Summary: Advanced MySQL and system command-line tools
Version: %{version}
@@ -12,8 +9,6 @@ URL: http://www.percona.com/software/percona-toolkit/
Source: percona-toolkit-%{version}.tar.gz
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
BuildArch: x86_64
BuildRequires: perl(ExtUtils::MakeMaker) make
Requires: perl(DBI) >= 1.13, perl(DBD::mysql) >= 1.0, perl(Time::HiRes), perl(IO::Socket::SSL), perl(Digest::MD5), perl(Term::ReadKey)
AutoReq: no

View File

@@ -50,7 +50,7 @@ copyright = u'2017, Percona LLC and/or its affiliates'
# The short X.Y version.
version = '3.0'
# The full version, including alpha/beta/rc tags.
release = '3.0.13'
release = '3.0.12'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

View File

@@ -560,6 +560,6 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
=head1 VERSION
Percona Toolkit v3.0.13 released 2018-12-28
Percona Toolkit v3.0.12 released 2018-09-11
=cut

View File

@@ -1,175 +1,319 @@
.. program:: pt-secure-collect
pt-secure-collect
=================
============================
:program:`pt-secure-collect`
============================
Collect, sanitize, pack and encrypt data. By default, this program will
collect the output of:
NAME
====
- ``pt-stalk --no-stalk --iterations=2 --sleep=30 --host=$mysql-host --dest=$temp-dir --port=$mysql-port --user=$mysql-user --password=$mysql-pass``
- ``pt-summary``
- ``pt-mysql-summary --host=$mysql-host --port=$mysql-port --user=$mysql-user --password=$mysql-pass``
:program:`pt-secure-collect` - collect, sanitize, pack and encrypt data.
Internal variable placeholders will be replaced with the corresponding
flag values. For example, ``$mysql-host`` will be replaced with the
value specified in the ``--mysql-host`` flag.
SYNOPSIS
========
Usage
-----
Usage:
::
pt-secure-data [<flags>] <command> [<args> ...]
By default, :program:`pt-secure-collect` will collect the output of:
- ``pt-stalk``
- ``pt-summary``
- ``pt-mysql-summary``
pt-secure-data [<flags>] <command> [<args> ...]
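For illustration only (the user name ``msandbox`` and the temporary directory are invented for this sketch; the host and port are simply the documented defaults), a collect run that relies on the placeholder substitution described above could look like this::

    # host/port are the defaults; user and temp dir are made up for the example
    pt-secure-collect collect --mysql-host=127.0.0.1 --mysql-port=3306 --mysql-user=msandbox --ask-mysql-pass --temp-dir=/tmp/pt-collect

With these flags, ``$mysql-host``, ``$mysql-port``, ``$mysql-user`` and ``$temp-dir`` are expanded to the values above when ``pt-stalk``, ``pt-summary`` and ``pt-mysql-summary`` are invoked, and the collected data is then sanitized and encrypted unless the corresponding ``--no-*`` flags are given.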
Global flags
------------
~~~~~~~~~~~~
.. option:: --help
+-----------+----------------------------------------------------------------------+
| Flag | Description |
+===========+======================================================================+
| --help | Show context-sensitive help (also try --help-long and --help-man). |
+-----------+----------------------------------------------------------------------+
| --debug | Enable debug log level. |
+-----------+----------------------------------------------------------------------+
Show context-sensitive help (also try --help-long and --help-man).
**Commands**
~~~~~~~~~~~~
.. option:: --debug
**Help command**
^^^^^^^^^^^^^^^^
Enable debug log level.
Show help
COMMANDS
========
**Collect command**
^^^^^^^^^^^^^^^^^^^
* **Help command**
Collect, sanitize, pack and encrypt data from pt-tools. Usage:
Show help
* **Collect command**
Collect, sanitize, pack and encrypt data from pt-tools. Usage:
::
::
pt-secure-collect collect <flags>
.. option:: --bin-dir
+--------------------------+-----------------------------------------------------------------------------------------------------+
| Flag | Description |
+==========================+=====================================================================================================+
| --bin-dir | Directory having the Percona Toolkit binaries (if they are not in PATH). |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --temp-dir | Temporary directory used for the data collection. Default: ``${HOME}/data_collection_{timestamp}`` |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --include-dir | Include this dir into the sanitized tar file |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --config-file | Path to the config file. Default: ``~/.my.cnf`` |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --mysql-host | MySQL host. Default: ``127.0.0.1`` |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --mysql-port | MySQL port. Default: ``3306`` |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --mysql-user | MySQL user name. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --mysql-password | MySQL password. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --ask-mysql-pass | Ask MySQL password. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --extra-cmd | Also run this command as part of the data collection. This parameter can be used more than once. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --encrypt-password | Encrypt the output file using this password. If omitted, it will be asked in the command line. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --no-collect | Do not collect data |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --no-sanitize | Do not sanitize data |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --no-encrypt | Do not encrypt the output file. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --no-sanitize-hostnames | Do not sanitize host names. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --no-sanitize-queries | Do not replace queries by their fingerprints. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
| --no-remove-temp-files | Do not remove temporary files. |
+--------------------------+-----------------------------------------------------------------------------------------------------+
Directory having the Percona Toolkit binaries (if they are not in PATH).
**Decrypt command**
^^^^^^^^^^^^^^^^^^^
.. option:: --temp-dir
| Decrypt an encrypted file. The password will be requested from the
terminal.
| Usage:
Temporary directory used for the data collection.
Default: ``${HOME}/data_collection_{timestamp}``
.. option:: --include-dir
Include this dir into the sanitized tar file.
.. option:: --config-file
Path to the config file. Default: ``~/.my.cnf``
.. option:: --mysql-host
MySQL host. Default: ``127.0.0.1``
.. option:: --mysql-port
MySQL port. Default: ``3306``
.. option:: --mysql-user
MySQL user name.
.. option:: --mysql-password
MySQL password.
.. option:: --ask-mysql-pass
Ask MySQL password.
.. option:: --extra-cmd
Also run this command as part of the data collection. This parameter can
be used more than once.
.. option:: --encrypt-password
Encrypt the output file using this password. If omitted, it will be asked
in the command line.
.. option:: --no-collect
Do not collect data
.. option:: --no-sanitize
Do not sanitize data
.. option:: --no-encrypt
Do not encrypt the output file.
.. option:: --no-sanitize-hostnames
Do not sanitize hostnames.
.. option:: --no-sanitize-queries
Do not replace queries by their fingerprints.
.. option:: --no-remove-temp-files
Do not remove temporary files.
* **Decrypt command**
Decrypt an encrypted file. The password will be requested from the
terminal. Usage:
::
::
pt-secure-collect decrypt [flags] <input file>
.. option:: --outfile
+-----------+-------------------------------------------------------------------------------------------------------------------------------------+
| Flag | Description |
+===========+=====================================================================================================================================+
| --outfile | Write the output to this file. If omitted, the output file name will be the same as the input file, adding the ``.aes`` extension. |
+-----------+-------------------------------------------------------------------------------------------------------------------------------------+
Write the output to this file. If omitted, the output file
name will be the same as the input file, adding the ``.aes`` extension.
**Encrypt command**
^^^^^^^^^^^^^^^^^^^
* **Encrypt command**
| Encrypt a file. The password will be requested from the terminal.
| Usage:
Encrypt a file. The password will be requested from the terminal. Usage:
::
::
pt-secure-collect encrypt [flags] <input file>
.. option:: --outfile
+-----------+--------------------------------------------------------------------------------------------------------------------------------------+
| Flag | Description |
+===========+======================================================================================================================================+
| --outfile | Write the output to this file. If omitted, the output file name will be the same as the input file, without the ``.aes`` extension. |
+-----------+--------------------------------------------------------------------------------------------------------------------------------------+
Write the output to this file. If omitted, the output file
name will be the same as the input file, without the ``.aes`` extension.
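As a purely hypothetical round trip (the file names are invented, and ``--outfile`` is passed explicitly so the example does not depend on the default output naming), the two commands mirror each other and both request the password from the terminal::

    # file names invented; --outfile chosen explicitly for clarity
    pt-secure-collect encrypt --outfile=collection.tar.gz.aes collection.tar.gz
    pt-secure-collect decrypt --outfile=collection.tar.gz collection.tar.gz.aes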
**Sanitize command**
^^^^^^^^^^^^^^^^^^^^
* **Sanitize command**
| Replace queries in a file by their fingerprints and obfuscate
hostnames.
| Usage:
Replace queries in a file by their fingerprints and obfuscate hostnames.
Usage:
::
::
pt-secure-collect sanitize [flags]
.. option:: --input-file
Input file. If not specified, the input will be Stdin.
.. option:: --output-file
Output file. If not specified, the output will be Stdout.
.. option:: --no-sanitize-hostnames
Do not sanitize host names.
.. option:: --no-sanitize-queries
Do not replace queries by their fingerprints.
+---------------------------+------------------------------------------------------------+
| Flag | Description |
+===========================+============================================================+
| --input-file | Input file. If not specified, the input will be Stdin. |
+---------------------------+------------------------------------------------------------+
| --output-file | Output file. If not specified, the output will be Stdout. |
+---------------------------+------------------------------------------------------------+
| --no-sanitize-hostnames | Do not sanitize host names. |
+---------------------------+------------------------------------------------------------+
| --no-sanitize-queries | Do not replace queries by their fingerprints. |
+---------------------------+------------------------------------------------------------+
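A minimal usage sketch under the flags above (the file names are illustrative; when ``--input-file`` and ``--output-file`` are not given, the command reads Stdin and writes Stdout)::

    # file names are invented for the example
    pt-secure-collect sanitize --input-file=slow.log --output-file=slow-sanitized.log

Queries in the input are replaced by their fingerprints and host names are obfuscated, unless ``--no-sanitize-queries`` or ``--no-sanitize-hostnames`` is given.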

View File

@@ -1,34 +1,6 @@
Percona Toolkit
***************
v3.0.13 released 2019-01-03
===========================
Improvements
* :jirabug:`PT-1340`: ``pt-stalk`` now doesn't call ``mysqladmin debug`` command
by default to avoid flooding in the error log when not needed.
``CMD_MYSQLADMIN="mysqladmin debug"`` environment variable reverts
``pt-stalk`` to the previous way of operation.
* :jirabug:`PT-1637`: A new ``--fail-on-stopped-replication`` option allows
``pt-table-checksum`` to detect failing slave nodes.
Fixed bugs
* :jirabug:`PT-1673`: ``pt-show-grants`` was incompatible with MariaDB 10+
(thanks `Tim Birkett <https://github.com/pysysops>`_)
* :jirabug:`PT-1638`: ``pt-online-schema-change`` was erroneously taking MariaDB
10.x for MySQL 8.0 and rejecting to work with it to avoid the upstream bug
`#89441 <https://bugs.mysql.com/bug.php?id=89441>`_ scope.
* :jirabug:`PT-1616`: ``pt-table-checksum`` failed to resume on large tables
with binary strings containing invalid UTF-8 characters.
* :jirabug:`PT-1573`: ``pt-query-digest`` didn't work in case of
``log_timestamps = SYSTEM`` my.cnf option.
* :jirabug:`PT-1114`: ``pt-table-checksum`` failed when the table was empty.
* :jirabug:`PT-157`: Specifying a non-primary key index with the ``i`` part of
the ``--source`` argument made ``pt-archiver`` ignore the
``--primary-key-only`` option.
v3.0.12 released 2018-09-13
===========================

View File

@@ -1,26 +0,0 @@
v3.0.13 released 2019-01-09
===========================
Improvements
* :jirabug:`PT-1340`: ``pt-stalk`` now doesn't call ``mysqladmin debug`` command
by default to avoid flooding in the error log.
``CMD_MYSQLADMIN="mysqladmin debug"`` environment variable reverts
``pt-stalk`` to the previous way of operation.
* :jirabug:`PT-1637`: A new ``--fail-on-stopped-replication`` option allows
``pt-table-checksum`` to detect failing slave nodes.
Fixed bugs
* :jirabug:`PT-1673`: ``pt-show-grants`` was incompatible with MariaDB 10+
(thanks `Tim Birkett <https://github.com/pysysops>`_)
* :jirabug:`PT-1638`: ``pt-online-schema-change`` was erroneously taking MariaDB
10.x for MySQL 8.0 and rejecting to work with it to avoid the upstream bug
`#89441 <https://bugs.mysql.com/bug.php?id=89441>`_ scope.
* :jirabug:`PT-1616`: ``pt-table-checksum`` failed to resume on large tables
with binary strings containing invalid UTF-8 characters.
* :jirabug:`PT-1573`: ``pt-query-digest`` didn't work in case of
``log_timestamps = SYSTEM`` my.cnf option.
* :jirabug:`PT-157`: Specifying a non-primary key index with the ``i`` part of
the ``--source`` argument made ``pt-archiver`` ignore the
``--primary-key-only`` option.

lib/HTTP/Tiny.pm (normal file), 1401 changed lines

File diff suppressed because it is too large

View File

@@ -26,7 +26,6 @@ use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
use IndexLength;
use Data::Dumper;
$Data::Dumper::Indent = 1;
@@ -488,11 +487,11 @@ sub row_estimate {
sub can_nibble {
my (%args) = @_;
my @required_args = qw(Cxn tbl chunk_size OptionParser TableParser Quoter);
my @required_args = qw(Cxn tbl chunk_size OptionParser TableParser);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my ($cxn, $tbl, $chunk_size, $o, $q) = @args{@required_args};
my ($cxn, $tbl, $chunk_size, $o) = @args{@required_args};
my $where = $o->has('where') ? $o->get('where') : '';
@@ -503,23 +502,6 @@ sub can_nibble {
where => $where,
);
my $can_get_keys;
if ($mysql_index) {
my $idx_len = IndexLength->new(Quoter => $q);
my ($key_len, $key) = $idx_len->index_length(
Cxn => $args{Cxn},
tbl => $tbl,
index => $mysql_index,
n_index_cols => $o->get('chunk-index-columns'),
);
if ( !$key || !$key_len || lc($key) ne lc($mysql_index)) {
$can_get_keys = 0;
} else {
$can_get_keys = 1;
}
}
# MySQL's chosen index is only something we should prefer
# if --where is used. Else, we can chose our own index
# and disregard the MySQL index from the row estimate.
@@ -539,10 +521,6 @@ sub can_nibble {
my $one_nibble = !defined $args{one_nibble} || $args{one_nibble}
? $row_est <= $chunk_size * $chunk_size_limit
: 0;
if (!$can_get_keys) {
$one_nibble = 1;
}
PTDEBUG && _d('One nibble:', $one_nibble ? 'yes' : 'no');
# Special case: we're resuming and there's no boundaries, so the table

View File

@@ -18,7 +18,7 @@
# ###########################################################################
package Percona::Toolkit;
our $VERSION = '3.0.13';
our $VERSION = '3.0.13-dev';
use strict;
use warnings FATAL => 'all';

View File

@@ -91,9 +91,6 @@ sub wait {
. " seconds on $dsn_name. Waiting.\n";
}
else {
if ($self->{fail_on_stopped_replication}) {
die 'replication is stopped';
}
print STDERR "Replica $dsn_name is stopped. Waiting.\n";
}
return;
@@ -106,9 +103,6 @@ sub wait {
$pr_first_report = sub {
my $dsn_name = $worst->{cxn}->name();
if ( !defined $worst->{lag} ) {
if ($self->{fail_on_stopped_replication}) {
die 'replication is stopped';
}
print STDERR "Replica $dsn_name is stopped. Waiting.\n";
}
return;

View File

@@ -67,7 +67,6 @@ my %port_for = (
chan_master1 => 2900,
chan_master2 => 2901,
chan_slave1 => 2902,
chan_slave2 => 2903,
);
my %server_type = (

View File

@@ -45,7 +45,7 @@ use FindBin qw();
eval {
require Percona::Toolkit;
require HTTP::Micro;
require HTTP::Tiny;
};
my $home = $ENV{HOME} || $ENV{HOMEPATH} || $ENV{USERPROFILE} || '.';
@@ -153,16 +153,14 @@ sub version_check {
return;
}
PTDEBUG && _d('Using', $protocol);
my $url = $args{url} # testing
|| $ENV{PERCONA_VERSION_CHECK_URL} # testing
|| "$protocol://v.percona.com";
PTDEBUG && _d('API URL:', $url);
# Get list of programs to check from Percona.
my $advice = pingback(
instances => $instances_to_check,
protocol => $protocol,
url => $url,
url => $args{url} # testing
|| $ENV{PERCONA_VERSION_CHECK_URL} # testing
|| "$protocol://v.percona.com",
);
if ( $advice ) {
PTDEBUG && _d('Advice:', Dumper($advice));
@@ -173,7 +171,7 @@ sub version_check {
else {
print "\n# A software update is available:\n";
}
print join("\n", map { "# * $_" } @$advice), "\n\n";
print join("\n", map { "# * ".($_ || '') } @$advice), "\n\n";
}
};
if ( $EVAL_ERROR ) {
@@ -344,14 +342,9 @@ sub get_uuid {
my $filename = $ENV{"HOME"} . $uuid_file;
my $uuid = _generate_uuid();
my $fh;
eval {
open($fh, '>', $filename);
};
if (!$EVAL_ERROR) {
print $fh $uuid;
close $fh;
}
open(my $fh, '>', $filename) or die "Could not open file '$filename' $!";
print $fh $uuid;
close $fh;
return $uuid;
}
@@ -391,7 +384,7 @@ sub pingback {
my $instances = $args{instances};
# Optional args
my $ua = $args{ua} || HTTP::Micro->new( timeout => 3 );
my $ua = $args{ua} || HTTP::Tiny->new( timeout => 3 );
# GET https://upgrade.percona.com, the server will return
# a plaintext list of items/programs it wants the tool
@@ -441,9 +434,8 @@ sub pingback {
general_id => get_uuid(),
);
my $tool_name = $ENV{XTRABACKUP_VERSION} ? "Percona XtraBackup" : File::Basename::basename($0);
my $client_response = {
headers => { "X-Percona-Toolkit-Tool" => $tool_name },
headers => { "X-Percona-Toolkit-Tool" => File::Basename::basename($0) },
content => $client_content,
};
PTDEBUG && _d('Client response:', Dumper($client_response));
@@ -536,7 +528,6 @@ my %sub_for_type = (
perl_version => \&get_perl_version,
perl_module_version => \&get_perl_module_version,
mysql_variable => \&get_mysql_variable,
xtrabackup => \&get_xtrabackup_version,
);
sub valid_item {
@@ -668,10 +659,6 @@ sub get_perl_version {
return $version;
}
sub get_xtrabackup_version {
return $ENV{XTRABACKUP_VERSION};
}
sub get_perl_module_version {
my (%args) = @_;
my $item = $args{item};

Binary file not shown.

View File

@@ -1,29 +0,0 @@
[client]
user = msandbox
password = msandbox
port = PORT
socket = /tmp/PORT/mysql_sandboxPORT.sock
[mysqld]
port = PORT
socket = /tmp/PORT/mysql_sandboxPORT.sock
pid-file = /tmp/PORT/data/mysql_sandboxPORT.pid
basedir = PERCONA_TOOLKIT_SANDBOX
datadir = /tmp/PORT/data
key_buffer_size = 16M
innodb_buffer_pool_size = 16M
innodb_data_home_dir = /tmp/PORT/data
innodb_log_group_home_dir = /tmp/PORT/data
innodb_data_file_path = ibdata1:10M:autoextend
innodb_log_file_size = 64M
log-bin = mysql-bin
relay_log = mysql-relay-bin
log_slave_updates
server-id = PORT
report-host = 127.0.0.1
report-port = PORT
log-error = /tmp/PORT/data/mysqld.log
innodb_lock_wait_timeout = 3
general_log
general_log_file = genlog
performance_schema = ON

View File

@@ -1,627 +0,0 @@
USE `mysql`;
-- MySQL dump 10.16 Distrib 10.1.21-MariaDB, for Linux (x86_64)
--
-- Host: localhost Database: localhost
-- ------------------------------------------------------
-- Server version 10.1.21-MariaDB
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `column_stats`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `column_stats` (
`db_name` varchar(64) COLLATE utf8_bin NOT NULL,
`table_name` varchar(64) COLLATE utf8_bin NOT NULL,
`column_name` varchar(64) COLLATE utf8_bin NOT NULL,
`min_value` varbinary(255) DEFAULT NULL,
`max_value` varbinary(255) DEFAULT NULL,
`nulls_ratio` decimal(12,4) DEFAULT NULL,
`avg_length` decimal(12,4) DEFAULT NULL,
`avg_frequency` decimal(12,4) DEFAULT NULL,
`hist_size` tinyint(3) unsigned DEFAULT NULL,
`hist_type` enum('SINGLE_PREC_HB','DOUBLE_PREC_HB') COLLATE utf8_bin DEFAULT NULL,
`histogram` varbinary(255) DEFAULT NULL,
PRIMARY KEY (`db_name`,`table_name`,`column_name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Statistics on Columns';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `columns_priv`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `columns_priv` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`Db` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`User` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Table_name` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`Column_name` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`Timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`Column_priv` set('Select','Insert','Update','References') CHARACTER SET utf8 NOT NULL DEFAULT '',
PRIMARY KEY (`Host`,`Db`,`User`,`Table_name`,`Column_name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Column privileges';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `db`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `db` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`Db` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`User` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Select_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Insert_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Update_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Delete_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Drop_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Grant_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`References_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Index_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Alter_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_tmp_table_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Lock_tables_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_view_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Show_view_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_routine_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Alter_routine_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Execute_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Event_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Trigger_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
PRIMARY KEY (`Host`,`Db`,`User`),
KEY `User` (`User`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Database privileges';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `event`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `event` (
`db` char(64) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '',
`name` char(64) NOT NULL DEFAULT '',
`body` longblob NOT NULL,
`definer` char(141) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '',
`execute_at` datetime DEFAULT NULL,
`interval_value` int(11) DEFAULT NULL,
`interval_field` enum('YEAR','QUARTER','MONTH','DAY','HOUR','MINUTE','WEEK','SECOND','MICROSECOND','YEAR_MONTH','DAY_HOUR','DAY_MINUTE','DAY_SECOND','HOUR_MINUTE','HOUR_SECOND','MINUTE_SECOND','DAY_MICROSECOND','HOUR_MICROSECOND','MINUTE_MICROSECOND','SECOND_MICROSECOND') DEFAULT NULL,
`created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`modified` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
`last_executed` datetime DEFAULT NULL,
`starts` datetime DEFAULT NULL,
`ends` datetime DEFAULT NULL,
`status` enum('ENABLED','DISABLED','SLAVESIDE_DISABLED') NOT NULL DEFAULT 'ENABLED',
`on_completion` enum('DROP','PRESERVE') NOT NULL DEFAULT 'DROP',
`sql_mode` set('REAL_AS_FLOAT','PIPES_AS_CONCAT','ANSI_QUOTES','IGNORE_SPACE','IGNORE_BAD_TABLE_OPTIONS','ONLY_FULL_GROUP_BY','NO_UNSIGNED_SUBTRACTION','NO_DIR_IN_CREATE','POSTGRESQL','ORACLE','MSSQL','DB2','MAXDB','NO_KEY_OPTIONS','NO_TABLE_OPTIONS','NO_FIELD_OPTIONS','MYSQL323','MYSQL40','ANSI','NO_AUTO_VALUE_ON_ZERO','NO_BACKSLASH_ESCAPES','STRICT_TRANS_TABLES','STRICT_ALL_TABLES','NO_ZERO_IN_DATE','NO_ZERO_DATE','INVALID_DATES','ERROR_FOR_DIVISION_BY_ZERO','TRADITIONAL','NO_AUTO_CREATE_USER','HIGH_NOT_PRECEDENCE','NO_ENGINE_SUBSTITUTION','PAD_CHAR_TO_FULL_LENGTH') NOT NULL DEFAULT '',
`comment` char(64) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '',
`originator` int(10) unsigned NOT NULL,
`time_zone` char(64) CHARACTER SET latin1 NOT NULL DEFAULT 'SYSTEM',
`character_set_client` char(32) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL,
`collation_connection` char(32) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL,
`db_collation` char(32) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL,
`body_utf8` longblob,
PRIMARY KEY (`db`,`name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Events';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `func`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `func` (
`name` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`ret` tinyint(1) NOT NULL DEFAULT '0',
`dl` char(128) COLLATE utf8_bin NOT NULL DEFAULT '',
`type` enum('function','aggregate') CHARACTER SET utf8 NOT NULL,
PRIMARY KEY (`name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='User defined functions';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `gtid_slave_pos`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `gtid_slave_pos` (
`domain_id` int(10) unsigned NOT NULL,
`sub_id` bigint(20) unsigned NOT NULL,
`server_id` int(10) unsigned NOT NULL,
`seq_no` bigint(20) unsigned NOT NULL,
PRIMARY KEY (`domain_id`,`sub_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 COMMENT='Replication slave GTID position';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `help_category`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `help_category` (
`help_category_id` smallint(5) unsigned NOT NULL,
`name` char(64) NOT NULL,
`parent_category_id` smallint(5) unsigned DEFAULT NULL,
`url` text NOT NULL,
PRIMARY KEY (`help_category_id`),
UNIQUE KEY `name` (`name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='help categories';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `help_keyword`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `help_keyword` (
`help_keyword_id` int(10) unsigned NOT NULL,
`name` char(64) NOT NULL,
PRIMARY KEY (`help_keyword_id`),
UNIQUE KEY `name` (`name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='help keywords';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `help_relation`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `help_relation` (
`help_topic_id` int(10) unsigned NOT NULL,
`help_keyword_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`help_keyword_id`,`help_topic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='keyword-topic relation';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `help_topic`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `help_topic` (
`help_topic_id` int(10) unsigned NOT NULL,
`name` char(64) NOT NULL,
`help_category_id` smallint(5) unsigned NOT NULL,
`description` text NOT NULL,
`example` text NOT NULL,
`url` text NOT NULL,
PRIMARY KEY (`help_topic_id`),
UNIQUE KEY `name` (`name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='help topics';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `host`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `host` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`Db` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`Select_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Insert_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Update_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Delete_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Drop_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Grant_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`References_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Index_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Alter_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_tmp_table_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Lock_tables_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_view_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Show_view_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_routine_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Alter_routine_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Execute_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Trigger_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
PRIMARY KEY (`Host`,`Db`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Host privileges; Merged with database privileges';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `index_stats`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `index_stats` (
`db_name` varchar(64) COLLATE utf8_bin NOT NULL,
`table_name` varchar(64) COLLATE utf8_bin NOT NULL,
`index_name` varchar(64) COLLATE utf8_bin NOT NULL,
`prefix_arity` int(11) unsigned NOT NULL,
`avg_frequency` decimal(12,4) DEFAULT NULL,
PRIMARY KEY (`db_name`,`table_name`,`index_name`,`prefix_arity`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Statistics on Indexes';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `innodb_index_stats`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `innodb_index_stats` (
`database_name` varchar(64) COLLATE utf8_bin NOT NULL,
`table_name` varchar(64) COLLATE utf8_bin NOT NULL,
`index_name` varchar(64) COLLATE utf8_bin NOT NULL,
`last_update` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`stat_name` varchar(64) COLLATE utf8_bin NOT NULL,
`stat_value` bigint(20) unsigned NOT NULL,
`sample_size` bigint(20) unsigned DEFAULT NULL,
`stat_description` varchar(1024) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`database_name`,`table_name`,`index_name`,`stat_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin STATS_PERSISTENT=0;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `innodb_table_stats`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `innodb_table_stats` (
`database_name` varchar(64) COLLATE utf8_bin NOT NULL,
`table_name` varchar(64) COLLATE utf8_bin NOT NULL,
`last_update` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`n_rows` bigint(20) unsigned NOT NULL,
`clustered_index_size` bigint(20) unsigned NOT NULL,
`sum_of_other_index_sizes` bigint(20) unsigned NOT NULL,
PRIMARY KEY (`database_name`,`table_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin STATS_PERSISTENT=0;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `plugin`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `plugin` (
`name` varchar(64) NOT NULL DEFAULT '',
`dl` varchar(128) NOT NULL DEFAULT '',
PRIMARY KEY (`name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='MySQL plugins';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `proc`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `proc` (
`db` char(64) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '',
`name` char(64) NOT NULL DEFAULT '',
`type` enum('FUNCTION','PROCEDURE') NOT NULL,
`specific_name` char(64) NOT NULL DEFAULT '',
`language` enum('SQL') NOT NULL DEFAULT 'SQL',
`sql_data_access` enum('CONTAINS_SQL','NO_SQL','READS_SQL_DATA','MODIFIES_SQL_DATA') NOT NULL DEFAULT 'CONTAINS_SQL',
`is_deterministic` enum('YES','NO') NOT NULL DEFAULT 'NO',
`security_type` enum('INVOKER','DEFINER') NOT NULL DEFAULT 'DEFINER',
`param_list` blob NOT NULL,
`returns` longblob NOT NULL,
`body` longblob NOT NULL,
`definer` char(141) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '',
`created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`modified` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
`sql_mode` set('REAL_AS_FLOAT','PIPES_AS_CONCAT','ANSI_QUOTES','IGNORE_SPACE','IGNORE_BAD_TABLE_OPTIONS','ONLY_FULL_GROUP_BY','NO_UNSIGNED_SUBTRACTION','NO_DIR_IN_CREATE','POSTGRESQL','ORACLE','MSSQL','DB2','MAXDB','NO_KEY_OPTIONS','NO_TABLE_OPTIONS','NO_FIELD_OPTIONS','MYSQL323','MYSQL40','ANSI','NO_AUTO_VALUE_ON_ZERO','NO_BACKSLASH_ESCAPES','STRICT_TRANS_TABLES','STRICT_ALL_TABLES','NO_ZERO_IN_DATE','NO_ZERO_DATE','INVALID_DATES','ERROR_FOR_DIVISION_BY_ZERO','TRADITIONAL','NO_AUTO_CREATE_USER','HIGH_NOT_PRECEDENCE','NO_ENGINE_SUBSTITUTION','PAD_CHAR_TO_FULL_LENGTH') NOT NULL DEFAULT '',
`comment` text CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
`character_set_client` char(32) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL,
`collation_connection` char(32) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL,
`db_collation` char(32) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL,
`body_utf8` longblob,
PRIMARY KEY (`db`,`name`,`type`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Stored Procedures';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `procs_priv`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `procs_priv` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`Db` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`User` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Routine_name` char(64) CHARACTER SET utf8 NOT NULL DEFAULT '',
`Routine_type` enum('FUNCTION','PROCEDURE') COLLATE utf8_bin NOT NULL,
`Grantor` char(141) COLLATE utf8_bin NOT NULL DEFAULT '',
`Proc_priv` set('Execute','Alter Routine','Grant') CHARACTER SET utf8 NOT NULL DEFAULT '',
`Timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`Host`,`Db`,`User`,`Routine_name`,`Routine_type`),
KEY `Grantor` (`Grantor`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Procedure privileges';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `proxies_priv`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `proxies_priv` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`User` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Proxied_host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`Proxied_user` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`With_grant` tinyint(1) NOT NULL DEFAULT '0',
`Grantor` char(141) COLLATE utf8_bin NOT NULL DEFAULT '',
`Timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`Host`,`User`,`Proxied_host`,`Proxied_user`),
KEY `Grantor` (`Grantor`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='User proxy privileges';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `roles_mapping`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `roles_mapping` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`User` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Role` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Admin_option` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
UNIQUE KEY `Host` (`Host`,`User`,`Role`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Granted roles';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `servers`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `servers` (
`Server_name` char(64) NOT NULL DEFAULT '',
`Host` char(64) NOT NULL DEFAULT '',
`Db` char(64) NOT NULL DEFAULT '',
`Username` char(80) NOT NULL DEFAULT '',
`Password` char(64) NOT NULL DEFAULT '',
`Port` int(4) NOT NULL DEFAULT '0',
`Socket` char(64) NOT NULL DEFAULT '',
`Wrapper` char(64) NOT NULL DEFAULT '',
`Owner` char(64) NOT NULL DEFAULT '',
PRIMARY KEY (`Server_name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='MySQL Foreign Servers table';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `table_stats`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `table_stats` (
`db_name` varchar(64) COLLATE utf8_bin NOT NULL,
`table_name` varchar(64) COLLATE utf8_bin NOT NULL,
`cardinality` bigint(21) unsigned DEFAULT NULL,
PRIMARY KEY (`db_name`,`table_name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Statistics on Tables';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `tables_priv`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `tables_priv` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`Db` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`User` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Table_name` char(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`Grantor` char(141) COLLATE utf8_bin NOT NULL DEFAULT '',
`Timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`Table_priv` set('Select','Insert','Update','Delete','Create','Drop','Grant','References','Index','Alter','Create View','Show view','Trigger') CHARACTER SET utf8 NOT NULL DEFAULT '',
`Column_priv` set('Select','Insert','Update','References') CHARACTER SET utf8 NOT NULL DEFAULT '',
PRIMARY KEY (`Host`,`Db`,`User`,`Table_name`),
KEY `Grantor` (`Grantor`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Table privileges';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `time_zone`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `time_zone` (
`Time_zone_id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`Use_leap_seconds` enum('Y','N') NOT NULL DEFAULT 'N',
PRIMARY KEY (`Time_zone_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Time zones';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `time_zone_leap_second`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `time_zone_leap_second` (
`Transition_time` bigint(20) NOT NULL,
`Correction` int(11) NOT NULL,
PRIMARY KEY (`Transition_time`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Leap seconds information for time zones';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `time_zone_name`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `time_zone_name` (
`Name` char(64) NOT NULL,
`Time_zone_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`Name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Time zone names';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `time_zone_transition`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `time_zone_transition` (
`Time_zone_id` int(10) unsigned NOT NULL,
`Transition_time` bigint(20) NOT NULL,
`Transition_type_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`Time_zone_id`,`Transition_time`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Time zone transitions';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `time_zone_transition_type`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `time_zone_transition_type` (
`Time_zone_id` int(10) unsigned NOT NULL,
`Transition_type_id` int(10) unsigned NOT NULL,
`Offset` int(11) NOT NULL DEFAULT '0',
`Is_DST` tinyint(3) unsigned NOT NULL DEFAULT '0',
`Abbreviation` char(8) NOT NULL DEFAULT '',
PRIMARY KEY (`Time_zone_id`,`Transition_type_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='Time zone transition types';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `user`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `user` (
`Host` char(60) COLLATE utf8_bin NOT NULL DEFAULT '',
`User` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`Password` char(41) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL DEFAULT '',
`Select_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Insert_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Update_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Delete_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Drop_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Reload_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Shutdown_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Process_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`File_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Grant_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`References_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Index_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Alter_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Show_db_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Super_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_tmp_table_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Lock_tables_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Execute_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Repl_slave_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Repl_client_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_view_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Show_view_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_routine_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Alter_routine_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_user_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Event_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Trigger_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`Create_tablespace_priv` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`ssl_type` enum('','ANY','X509','SPECIFIED') CHARACTER SET utf8 NOT NULL DEFAULT '',
`ssl_cipher` blob NOT NULL,
`x509_issuer` blob NOT NULL,
`x509_subject` blob NOT NULL,
`max_questions` int(11) unsigned NOT NULL DEFAULT '0',
`max_updates` int(11) unsigned NOT NULL DEFAULT '0',
`max_connections` int(11) unsigned NOT NULL DEFAULT '0',
`max_user_connections` int(11) NOT NULL DEFAULT '0',
`plugin` char(64) CHARACTER SET latin1 NOT NULL DEFAULT '',
`authentication_string` text COLLATE utf8_bin NOT NULL,
`password_expired` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`is_role` enum('N','Y') CHARACTER SET utf8 NOT NULL DEFAULT 'N',
`default_role` char(80) COLLATE utf8_bin NOT NULL DEFAULT '',
`max_statement_time` decimal(12,6) NOT NULL DEFAULT '0.000000',
PRIMARY KEY (`Host`,`User`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Users and global privileges';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `general_log`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `general_log` (
`event_time` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6),
`user_host` mediumtext NOT NULL,
`thread_id` bigint(21) unsigned NOT NULL,
`server_id` int(10) unsigned NOT NULL,
`command_type` varchar(64) NOT NULL,
`argument` mediumtext NOT NULL
) ENGINE=CSV DEFAULT CHARSET=utf8 COMMENT='General log';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `slow_log`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE IF NOT EXISTS `slow_log` (
`start_time` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6),
`user_host` mediumtext NOT NULL,
`query_time` time(6) NOT NULL,
`lock_time` time(6) NOT NULL,
`rows_sent` int(11) NOT NULL,
`rows_examined` int(11) NOT NULL,
`db` varchar(512) NOT NULL,
`last_insert_id` int(11) NOT NULL,
`insert_id` int(11) NOT NULL,
`server_id` int(10) unsigned NOT NULL,
`sql_text` mediumtext NOT NULL,
`thread_id` bigint(21) unsigned NOT NULL,
`rows_affected` int(11) NOT NULL
) ENGINE=CSV DEFAULT CHARSET=utf8 COMMENT='Slow log';
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2017-03-10 19:49:04

View File

@@ -1,6 +1,6 @@
GO := go
pkgs = $(shell find . -type d -name "pt-*" -exec basename {} \;)
VERSION="3.0.13"
VERSION="3.0.12"
BUILD=$(shell date +%FT%T%z)
GOVERSION=$(shell go version | cut --delimiter=" " -f3)
GOUTILSDIR ?= $(GOPATH)/bin

View File

@@ -52,7 +52,6 @@ func LoadBson(filename string, destination interface{}) error {
if err != nil {
return err
}
defer file.Close()
buf, err := ioutil.ReadAll(file)
if err != nil {

View File

@@ -57,7 +57,7 @@ func NewProfiler(iterator pmgo.IterManager, filters []filter.Filter, ticker <-ch
// internal
docsChan: make(chan proto.SystemProfile, DocsBufferSize),
timeoutsChan: make(chan time.Time),
timeoutsChan: nil,
keyFilters: []string{"^shardVersion$", "^\\$"},
}
}
@@ -97,6 +97,9 @@ func (p *Profile) Stop() {
func (p *Profile) TimeoutsChan() <-chan time.Time {
p.lock.Lock()
defer p.lock.Unlock()
if p.timeoutsChan == nil {
p.timeoutsChan = make(chan time.Time)
}
return p.timeoutsChan
}
@@ -126,15 +129,14 @@ func (p *Profile) getDocs() {
for p.iterator.Next(&doc) || p.iterator.Timeout() {
if p.iterator.Timeout() {
select {
case p.timeoutsChan <- time.Now().UTC():
default:
if p.timeoutsChan != nil {
p.timeoutsChan <- time.Now().UTC()
}
continue
}
valid := true
for _, filter := range p.filters {
if !filter(doc) {
if filter(doc) == false {
valid = false
break
}
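
One side of the hunk above sends on timeoutsChan through a select with a default case, which makes the notification non-blocking; the other guards a plain send with a nil check. For reference, a minimal, self-contained sketch of the non-blocking-send idiom, using hypothetical names rather than the profiler's actual types:

package main

import (
	"fmt"
	"time"
)

// tryNotify performs a non-blocking send: if no goroutine is currently
// receiving from ch, the value is dropped instead of blocking the caller.
func tryNotify(ch chan time.Time) bool {
	select {
	case ch <- time.Now().UTC():
		return true
	default:
		return false
	}
}

func main() {
	ch := make(chan time.Time) // unbuffered, so a send needs a waiting receiver

	// No receiver yet: the send falls through to default and is dropped.
	fmt.Println("delivered:", tryNotify(ch))

	go func() { fmt.Println("received:", <-ch) }()
	time.Sleep(10 * time.Millisecond) // give the receiver time to start
	fmt.Println("delivered:", tryNotify(ch))
}

With an unbuffered channel, the select/default form drops the timestamp when no reader is waiting instead of stalling the iteration loop.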

View File

@@ -8,9 +8,8 @@ import (
"time"
"github.com/montanaflynn/stats"
"gopkg.in/mgo.v2/bson"
"github.com/percona/percona-toolkit/src/go/mongolib/proto"
"gopkg.in/mgo.v2/bson"
)
type StatsError struct {
@@ -92,7 +91,6 @@ func (s *Stats) Add(doc proto.SystemProfile) error {
qiac.Count++
// docsExamined is renamed from nscannedObjects in 3.2.0.
// https://docs.mongodb.com/manual/reference/database-profiler/#system.profile.docsExamined
s.Lock()
if doc.NscannedObjects > 0 {
qiac.NScanned = append(qiac.NScanned, float64(doc.NscannedObjects))
} else {
@@ -107,15 +105,14 @@ func (s *Stats) Add(doc proto.SystemProfile) error {
if qiac.LastSeen.IsZero() || qiac.LastSeen.Before(doc.Ts) {
qiac.LastSeen = doc.Ts
}
s.Unlock()
return nil
}
// Queries returns all collected statistics
func (s *Stats) Queries() Queries {
s.Lock()
defer s.Unlock()
s.RLock()
defer s.RUnlock()
keys := GroupKeys{}
for key := range s.queryInfoAndCounters {
@@ -247,7 +244,6 @@ type Statistics struct {
Max float64
Avg float64
Pct95 float64
Pct99 float64
StdDev float64
Median float64
}
@@ -327,7 +323,6 @@ func calcStats(samples []float64) Statistics {
s.Max, _ = stats.Max(samples)
s.Avg, _ = stats.Mean(samples)
s.Pct95, _ = stats.PercentileNearestRank(samples, 95)
s.Pct99, _ = stats.PercentileNearestRank(samples, 99)
s.StdDev, _ = stats.StandardDeviation(samples)
s.Median, _ = stats.Median(samples)
return s
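
The Add and Queries hunks above differ mainly in how the shared counters are guarded: one side takes the exclusive lock around the write path and a read lock in Queries. A generic sketch of that sync.RWMutex pattern follows, with hypothetical names unrelated to the Stats type:

package main

import (
	"fmt"
	"sync"
)

// counterStore is a toy stand-in for a stats collector: writers take the
// exclusive lock, readers share the read lock.
type counterStore struct {
	sync.RWMutex
	counts map[string]int
}

// Add mutates the map, so it needs the exclusive (write) lock.
func (c *counterStore) Add(key string) {
	c.Lock()
	defer c.Unlock()
	c.counts[key]++
}

// Snapshot only reads, so any number of callers can hold the read lock at once.
func (c *counterStore) Snapshot() map[string]int {
	c.RLock()
	defer c.RUnlock()
	out := make(map[string]int, len(c.counts))
	for k, v := range c.counts {
		out[k] = v
	}
	return out
}

func main() {
	s := &counterStore{counts: map[string]int{}}
	var wg sync.WaitGroup
	for i := 0; i < 100; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			s.Add("queries")
		}()
	}
	wg.Wait()
	fmt.Println(s.Snapshot()) // map[queries:100]
}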

View File

@@ -188,6 +188,8 @@ func TestStatsSingle(t *testing.T) {
for _, file := range files {
f := file.Name()
t.Run(f, func(t *testing.T) {
t.Parallel()
doc := proto.SystemProfile{}
err = tutil.LoadBson(dir+f, &doc)
if err != nil {

View File

@@ -1,10 +1,10 @@
package util
import (
"sort"
"strings"
"time"
"github.com/bradfitz/slice"
"github.com/percona/percona-toolkit/src/go/mongolib/proto"
"github.com/percona/pmgo"
"github.com/pkg/errors"
@@ -58,7 +58,9 @@ func GetReplicasetMembers(dialer pmgo.Dialer, di *pmgo.DialInfo) ([]proto.Member
if serverStatus, err := GetServerStatus(dialer, di, m.Name); err == nil {
m.ID = serverStatus.Pid
m.StorageEngine = serverStatus.StorageEngine
if cmdOpts.Parsed.Sharding.ClusterRole != "" {
if cmdOpts.Parsed.Sharding.ClusterRole == "" {
m.StateStr = m.StateStr
} else {
m.StateStr = cmdOpts.Parsed.Sharding.ClusterRole + "/" + m.StateStr
}
m.StateStr = strings.ToUpper(m.StateStr)
@@ -73,7 +75,7 @@ func GetReplicasetMembers(dialer pmgo.Dialer, di *pmgo.DialInfo) ([]proto.Member
members = append(members, member)
}
sort.Slice(members, func(i, j int) bool { return members[i].Name < members[j].Name })
slice.Sort(members, func(i, j int) bool { return members[i].Name < members[j].Name })
return members, nil
}
@@ -143,13 +145,13 @@ func buildHostsListFromShardMap(shardsMap proto.ShardsMap) []string {
/* Example
mongos> db.getSiblingDB('admin').runCommand('getShardMap')
{
"map" : {
"config" : "localhost:19001,localhost:19002,localhost:19003",
"localhost:17001" : "r1/localhost:17001,localhost:17002,localhost:17003",
"r1" : "r1/localhost:17001,localhost:17002,localhost:17003",
"r1/localhost:17001,localhost:17002,localhost:17003" : "r1/localhost:17001,localhost:17002,localhost:17003",
},
"ok" : 1
"map" : {
"config" : "localhost:19001,localhost:19002,localhost:19003",
"localhost:17001" : "r1/localhost:17001,localhost:17002,localhost:17003",
"r1" : "r1/localhost:17001,localhost:17002,localhost:17003",
"r1/localhost:17001,localhost:17002,localhost:17003" : "r1/localhost:17001,localhost:17002,localhost:17003",
},
"ok" : 1
}
*/
@@ -198,10 +200,12 @@ func GetShardedHosts(dialer pmgo.Dialer, di *pmgo.DialInfo) ([]string, error) {
return hostnames, errors.Wrap(err, "cannot list shards")
}
for _, shardInfo := range shardsInfo.Shards {
m := strings.Split(shardInfo.Host, "/")
h := strings.Split(m[1], ",")
hostnames = append(hostnames, h[0])
if shardsInfo != nil {
for _, shardInfo := range shardsInfo.Shards {
m := strings.Split(shardInfo.Host, "/")
h := strings.Split(m[1], ",")
hostnames = append(hostnames, h[0])
}
}
return hostnames, nil
}
@@ -226,11 +230,7 @@ func GetServerStatus(dialer pmgo.Dialer, di *pmgo.DialInfo, hostname string) (pr
defer session.Close()
session.SetMode(mgo.Monotonic, true)
query := bson.D{
{Name: "serverStatus", Value: 1},
{Name: "recordStats", Value: 1},
}
if err := session.DB("admin").Run(query, &ss); err != nil {
if err := session.DB("admin").Run(bson.D{{"serverStatus", 1}, {"recordStats", 1}}, &ss); err != nil {
return ss, errors.Wrap(err, "GetHostInfo.serverStatus")
}
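
The GetReplicasetMembers hunk switches the member sort between github.com/bradfitz/slice and the standard library. For reference, a minimal sketch of stdlib sort.Slice (available since Go 1.8), using hypothetical data rather than proto.Members:

package main

import (
	"fmt"
	"sort"
)

// member mirrors the shape being sorted above: a struct ordered by Name
// (hypothetical fields for illustration only).
type member struct {
	Name     string
	StateStr string
}

func main() {
	members := []member{
		{Name: "localhost:17003", StateStr: "SECONDARY"},
		{Name: "localhost:17001", StateStr: "PRIMARY"},
		{Name: "localhost:17002", StateStr: "SECONDARY"},
	}

	// Order the slice in place by Name.
	sort.Slice(members, func(i, j int) bool { return members[i].Name < members[j].Name })

	for _, m := range members {
		fmt.Println(m.Name, m.StateStr)
	}
}

Because sort.Slice takes a less(i, j) closure, no third-party dependency is needed to order an arbitrary slice of structs in place.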

View File

@@ -1,44 +0,0 @@
# Debug collector tool
Collects debug data (logs, resource statuses, etc.) from a k8s/OpenShift cluster. The data is packed into a "cluster-dump.tar.gz" archive in the current working directory.
### Data that will be collected
"pods",
"replicasets",
"deployments",
"statefulsets",
"replicationcontrollers",
"events",
"configmaps",
"secrets",
"cronjobs",
"jobs",
"podsecuritypolicies",
"poddisruptionbudgets",
"perconaxtradbbackups",
"perconaxtradbclusterbackups",
"perconaxtradbclusterrestores",
"perconaxtradbclusters",
"clusterrolebindings",
"clusterroles",
"rolebindings",
"roles",
"storageclasses",
"persistentvolumeclaims",
"persistentvolumes",
"modes",
"your-custom-resource" (depend on 'resource' flag)
### Usage
`pt-k8s-debug-collector <flags>`
Flags:
`--resource` targeted custom resource name (default "pxc")
`--namespace` targeted namespace. By default, data will be collected from all namespaces
`--cluster` targeted pxc/psmdb cluster. By default, data will be collected from all available clusters
### Requirements
Installed and configured 'kubectl'
Installed and configured 'pt-mysql-summary'
Installed and configured 'pt-mongodb-summary'
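
Since the collector shells out to kubectl (see the requirements above), here is a minimal Go sketch of that pattern: confirm the binary is on PATH and capture `kubectl get pods -o json`. The helper name and error handling are illustrative assumptions, not the tool's actual code:

package main

import (
	"bytes"
	"fmt"
	"log"
	"os/exec"
)

// runKubectl is a hypothetical helper: it runs kubectl with the given
// arguments and returns stdout, failing if the binary is missing or if
// anything was written to stderr.
func runKubectl(args ...string) ([]byte, error) {
	if _, err := exec.LookPath("kubectl"); err != nil {
		return nil, fmt.Errorf("kubectl not found in PATH: %w", err)
	}
	var outb, errb bytes.Buffer
	cmd := exec.Command("kubectl", args...)
	cmd.Stdout = &outb
	cmd.Stderr = &errb
	if err := cmd.Run(); err != nil || errb.Len() > 0 {
		return nil, fmt.Errorf("kubectl %v: %v, stderr: %s", args, err, errb.String())
	}
	return outb.Bytes(), nil
}

func main() {
	out, err := runKubectl("get", "pods", "-o", "json", "--all-namespaces")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("kubectl returned %d bytes of JSON\n", len(out))
}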

View File

@@ -1,355 +0,0 @@
package dumper
import (
"archive/tar"
"bytes"
"compress/gzip"
"encoding/base64"
"encoding/json"
"fmt"
"log"
"os"
"os/exec"
"path/filepath"
"strings"
"time"
"github.com/pkg/errors"
corev1 "k8s.io/api/core/v1"
)
// Dumper struct is for dumping a cluster
type Dumper struct {
cmd string
resources []string
namespace string
location string
errors string
mode int64
crType string
}
// New returns a new Dumper object
func New(location, namespace, resource string) Dumper {
resources := []string{
"pods",
"replicasets",
"deployments",
"statefulsets",
"replicationcontrollers",
"events",
"configmaps",
"secrets",
"cronjobs",
"jobs",
"podsecuritypolicies",
"poddisruptionbudgets",
"perconaxtradbbackups",
"perconaxtradbclusterbackups",
"perconaxtradbclusterrestores",
"perconaxtradbclusters",
"clusterrolebindings",
"clusterroles",
"rolebindings",
"roles",
"storageclasses",
"persistentvolumeclaims",
"persistentvolumes",
}
if len(resource) > 0 {
resources = append(resources, resource)
}
return Dumper{
cmd: "kubectl",
resources: resources,
location: "cluster-dump",
mode: int64(0777),
namespace: namespace,
crType: resource,
}
}
type k8sPods struct {
Items []corev1.Pod `json:"items"`
}
type namespaces struct {
Items []corev1.Namespace `json:"items"`
}
// DumpCluster creates a dump of the cluster in Dumper.location
func (d *Dumper) DumpCluster() error {
file, err := os.Create(d.location + ".tar.gz")
if err != nil {
return errors.Wrap(err, "create tar file")
}
zr := gzip.NewWriter(file)
tw := tar.NewWriter(zr)
defer func() {
err = addToArchive(d.location+"/errors.txt", d.mode, []byte(d.errors), tw)
if err != nil {
log.Println("Error: add errors.txt to archive:", err)
}
err = tw.Close()
if err != nil {
log.Println("close tar writer", err)
return
}
err = zr.Close()
if err != nil {
log.Println("close gzip writer", err)
return
}
err = file.Close()
if err != nil {
log.Println("close file", err)
return
}
}()
var nss namespaces
if len(d.namespace) > 0 {
ns := corev1.Namespace{}
ns.Name = d.namespace
nss.Items = append(nss.Items, ns)
} else {
args := []string{"get", "namespaces", "-o", "json"}
output, err := d.runCmd(args...)
if err != nil {
d.logError(err.Error(), args...)
return errors.Wrap(err, "get namespaces")
}
err = json.Unmarshal(output, &nss)
if err != nil {
d.logError(err.Error(), "unmarshal namespaces")
return errors.Wrap(err, "unmarshal namespaces")
}
}
for _, ns := range nss.Items {
args := []string{"get", "pods", "-o", "json", "--namespace", ns.Name}
output, err := d.runCmd(args...)
if err != nil {
d.logError(err.Error(), args...)
continue
}
var pods k8sPods
err = json.Unmarshal(output, &pods)
if err != nil {
d.logError(err.Error(), "unmarshal pods from namespace", ns.Name)
log.Printf("Error: unmarshal pods in namespace %s: %v", ns.Name, err)
}
for _, pod := range pods.Items {
location := filepath.Join(d.location, ns.Name, pod.Name, "logs.txt")
args := []string{"logs", pod.Name, "--namespace", ns.Name, "--all-containers"}
output, err = d.runCmd(args...)
if err != nil {
d.logError(err.Error(), args...)
err = addToArchive(location, d.mode, []byte(err.Error()), tw)
if err != nil {
log.Printf("Error: create archive with logs for pod %s in namespace %s: %v", pod.Name, ns.Name, err)
}
continue
}
err = addToArchive(location, d.mode, output, tw)
if err != nil {
d.logError(err.Error(), "create archive for pod "+pod.Name)
log.Printf("Error: create archive for pod %s: %v", pod.Name, err)
}
if len(pod.Labels) == 0 {
continue
}
location = filepath.Join(d.location, ns.Name, pod.Name, "/pt-summary.txt")
component := d.crType
if d.crType == "psmdb" {
component = "mongod"
}
if pod.Labels["app.kubernetes.io/component"] == component {
output, err = d.getPodSummary(d.crType, pod.Name, pod.Labels["app.kubernetes.io/instance"], tw)
if err != nil {
d.logError(err.Error(), d.crType, pod.Name)
err = addToArchive(location, d.mode, []byte(err.Error()), tw)
if err != nil {
log.Printf("Error: create pt-summary errors archive for pod %s in namespace %s: %v", pod.Name, ns.Name, err)
}
continue
}
err = addToArchive(location, d.mode, output, tw)
if err != nil {
d.logError(err.Error(), "create pt-summary archive for pod "+pod.Name)
log.Printf("Error: create pt-summary archive for pod %s: %v", pod.Name, err)
}
}
}
for _, resource := range d.resources {
err = d.getResource(resource, ns.Name, tw)
if err != nil {
log.Printf("Error: get %s resource: %v", resource, err)
}
}
}
err = d.getResource("nodes", "", tw)
if err != nil {
return errors.Wrapf(err, "get nodes")
}
return nil
}
// runCmd runs the command (Dumper.cmd) with the given args and returns its output
func (d *Dumper) runCmd(args ...string) ([]byte, error) {
var outb, errb bytes.Buffer
cmd := exec.Command(d.cmd, args...)
cmd.Stdout = &outb
cmd.Stderr = &errb
err := cmd.Run()
if err != nil || errb.Len() > 0 {
return nil, errors.Errorf("error: %v, stderr: %s, stdout: %s", err, errb.String(), outb.String())
}
return outb.Bytes(), nil
}
func (d *Dumper) getResource(name, namespace string, tw *tar.Writer) error {
location := d.location
args := []string{"get", name, "-o", "yaml"}
if len(namespace) > 0 {
args = append(args, "--namespace", namespace)
location = filepath.Join(d.location, namespace)
}
location = filepath.Join(location, name+".yaml")
output, err := d.runCmd(args...)
if err != nil {
d.logError(err.Error(), args...)
log.Printf("Error: get resource %s in namespace %s: %v", name, namespace, err)
return addToArchive(location, d.mode, []byte(err.Error()), tw)
}
return addToArchive(location, d.mode, output, tw)
}
func (d *Dumper) logError(err string, args ...string) {
d.errors += d.cmd + " " + strings.Join(args, " ") + ": " + err + "\n"
}
func addToArchive(location string, mode int64, content []byte, tw *tar.Writer) error {
hdr := &tar.Header{
Name: location,
Mode: mode,
Size: int64(len(content)),
}
if err := tw.WriteHeader(hdr); err != nil {
return errors.Wrapf(err, "write header to %s", location)
}
if _, err := tw.Write(content); err != nil {
return errors.Wrapf(err, "write content to %s", location)
}
return nil
}
type crSecrets struct {
Spec struct {
SecretName string `json:"secretsName,omitempty"`
Secrets struct {
Users string `json:"users,omitempty"`
} `json:"secrets,omitempty"`
} `json:"spec"`
}
func (d *Dumper) getPodSummary(resource, podName, crName string, tw *tar.Writer) ([]byte, error) {
var (
summCmdName string
ports string
summCmdArgs []string
)
switch resource {
case "pxc":
cr, err := d.getCR("pxc/" + crName)
if err != nil {
return nil, errors.Wrap(err, "get cr")
}
pass, err := d.getDataFromSecret(cr.Spec.SecretName, "root")
if err != nil {
return nil, errors.Wrap(err, "get password from pxc users secret")
}
ports = "3306:3306"
summCmdName = "pt-mysql-summary"
summCmdArgs = []string{"--host=127.0.0.1", "--port=3306", "--user=root", "--password=" + string(pass)}
case "psmdb":
cr, err := d.getCR("psmdb/" + crName)
if err != nil {
return nil, errors.Wrap(err, "get cr")
}
pass, err := d.getDataFromSecret(cr.Spec.Secrets.Users, "MONGODB_CLUSTER_ADMIN_PASSWORD")
if err != nil {
return nil, errors.Wrap(err, "get password from psmdb users secret")
}
ports = "27017:27017"
summCmdName = "pt-mongodb-summary"
summCmdArgs = []string{"--username=clusterAdmin", "--password=" + pass, "--authenticationDatabase=admin", "127.0.0.1:27017"}
}
cmdPortFwd := exec.Command(d.cmd, "port-forward", "pod/"+podName, ports)
go func() {
err := cmdPortFwd.Run()
if err != nil {
d.logError(err.Error(), "port-forward")
}
}()
defer func() {
err := cmdPortFwd.Process.Kill()
if err != nil {
d.logError(err.Error(), "kill port-forward")
}
}()
time.Sleep(3 * time.Second) // wait for port-forward command
var outb, errb bytes.Buffer
cmd := exec.Command(summCmdName, summCmdArgs...)
cmd.Stdout = &outb
cmd.Stderr = &errb
err := cmd.Run()
if err != nil {
return nil, errors.Errorf("error: %v, stderr: %s, stdout: %s", err, errb.String(), outb.String())
}
return []byte(fmt.Sprintf("stderr: %s, stdout: %s", errb.String(), outb.String())), nil
}
func (d *Dumper) getCR(crName string) (crSecrets, error) {
var cr crSecrets
output, err := d.runCmd("get", crName, "-o", "json")
if err != nil {
return cr, errors.Wrap(err, "get "+crName)
}
err = json.Unmarshal(output, &cr)
if err != nil {
return cr, errors.Wrap(err, "unmarshal psmdb cr")
}
return cr, nil
}
func (d *Dumper) getDataFromSecret(secretName, dataName string) (string, error) {
passEncoded, err := d.runCmd("get", "secrets/"+secretName, "--template={{.data."+dataName+"}}")
if err != nil {
return "", errors.Wrap(err, "run get secret cmd")
}
pass, err := base64.StdEncoding.DecodeString(string(passEncoded))
if err != nil {
return "", errors.Wrap(err, "decode data")
}
return string(pass), nil
}

View File

@@ -1,35 +0,0 @@
package main
import (
"flag"
"log"
"os"
"github.com/percona/percona-toolkit/src/go/pt-k8s-debug-collector/dumper"
)
func main() {
namespace := ""
resource := ""
clusterName := ""
flag.StringVar(&namespace, "namespace", "", "Namespace for collecting data. If empty data will be collected from all namespaces")
flag.StringVar(&resource, "resource", "pxc", "Resource name. Default value - 'pxc'")
flag.StringVar(&clusterName, "cluster", "", "Cluster name")
flag.Parse()
if len(clusterName) > 0 {
resource += "/" + clusterName
}
d := dumper.New("", namespace, resource)
log.Println("Start collecting cluster data")
err := d.DumpCluster()
if err != nil {
log.Println("Error:", err)
os.Exit(1)
}
log.Println("Done")
}

View File

@@ -30,13 +30,12 @@ import (
const (
TOOLNAME = "pt-mongodb-summary"
DefaultAuthDB = "admin"
DefaultHost = "localhost:27017"
DefaultLogLevel = "warn"
DefaultRunningOpsInterval = 1000 // milliseconds
DefaultRunningOpsSamples = 5
DefaultOutputFormat = "text"
typeMongos = "mongos"
DEFAULT_AUTHDB = "admin"
DEFAULT_HOST = "localhost:27017"
DEFAULT_LOGLEVEL = "warn"
DEFAULT_RUNNINGOPSINTERVAL = 1000 // milliseconds
DEFAULT_RUNNINGOPSSAMPLES = 5
DEFAULT_OUTPUT_FORMAT = "text"
)
var (
@@ -100,10 +99,10 @@ type security struct {
type databases struct {
Databases []struct {
Name string `bson:"name"`
// SizeOnDisk int64 `bson:"sizeOnDisk"`
// Empty bool `bson:"empty"`
// Shards map[string]int64 `bson:"shards"`
Name string `bson:"name"`
SizeOnDisk int64 `bson:"sizeOnDisk"`
Empty bool `bson:"empty"`
Shards map[string]int64 `bson:"shards"`
} `bson:"databases"`
TotalSize int64 `bson:"totalSize"`
TotalSizeMb int64 `bson:"totalSizeMb"`
@@ -228,7 +227,7 @@ func main() {
ci := &collectedInfo{}
ci.HostInfo, err = getHostinfo(session)
ci.HostInfo, err = GetHostinfo(session)
if err != nil {
message := fmt.Sprintf("Cannot get host info for %q: %s", di.Addrs[0], err.Error())
log.Errorf(message)
@@ -242,18 +241,13 @@ func main() {
log.Debugf("replicaMembers:\n%+v\n", ci.ReplicaMembers)
if opts.RunningOpsSamples > 0 && opts.RunningOpsInterval > 0 {
ci.RunningOps, err = getOpCountersStats(
session,
opts.RunningOpsSamples,
time.Duration(opts.RunningOpsInterval)*time.Millisecond,
)
if err != nil {
if ci.RunningOps, err = GetOpCountersStats(session, opts.RunningOpsSamples, time.Duration(opts.RunningOpsInterval)*time.Millisecond); err != nil {
log.Printf("[Error] cannot get Opcounters stats: %v\n", err)
}
}
if ci.HostInfo != nil {
if ci.SecuritySettings, err = getSecuritySettings(session, ci.HostInfo.Version); err != nil {
if ci.SecuritySettings, err = GetSecuritySettings(session, ci.HostInfo.Version); err != nil {
log.Errorf("[Error] cannot get security settings: %v\n", err)
}
} else {
@@ -261,7 +255,7 @@ func main() {
}
if ci.OplogInfo, err = oplog.GetOplogInfo(hostnames, di); err != nil {
log.Infof("Cannot get Oplog info: %s\n", err)
log.Info("Cannot get Oplog info: %v\n", err)
} else {
if len(ci.OplogInfo) == 0 {
log.Info("oplog info is empty. Skipping")
@@ -271,13 +265,13 @@ func main() {
}
// individual servers won't know about this info
if ci.HostInfo.NodeType == typeMongos {
if ci.ClusterWideInfo, err = getClusterwideInfo(session); err != nil {
if ci.HostInfo.NodeType == "mongos" {
if ci.ClusterWideInfo, err = GetClusterwideInfo(session); err != nil {
log.Printf("[Error] cannot get cluster wide info: %v\n", err)
}
}
if ci.HostInfo.NodeType == typeMongos {
if ci.HostInfo.NodeType == "mongos" {
if ci.BalancerStats, err = GetBalancerStats(session); err != nil {
log.Printf("[Error] cannot get balancer stats: %v\n", err)
}
@@ -332,7 +326,7 @@ func formatResults(ci *collectedInfo, format string) ([]byte, error) {
return buf.Bytes(), nil
}
func getHostinfo(session pmgo.SessionManager) (*hostInfo, error) {
func GetHostinfo(session pmgo.SessionManager) (*hostInfo, error) {
hi := proto.HostInfo{}
if err := session.Run(bson.M{"hostInfo": 1}, &hi); err != nil {
@@ -363,7 +357,7 @@ func getHostinfo(session pmgo.SessionManager) (*hostInfo, error) {
Hostname: hi.System.Hostname,
HostOsType: hi.Os.Type,
HostSystemCPUArch: hi.System.CpuArch,
DBPath: "", // Sets default. It will be overridden later if necessary
DBPath: "", // Sets default. It will be overriden later if necessary
ProcessName: ss.Process,
ProcProcessCount: procCount,
@@ -396,19 +390,19 @@ func countMongodProcesses() (int, error) {
if err != nil {
continue
}
if name, _ := p.Name(); name == "mongod" || name == typeMongos {
if name, _ := p.Name(); name == "mongod" || name == "mongos" {
count++
}
}
return count, nil
}
func getClusterwideInfo(session pmgo.SessionManager) (*clusterwideInfo, error) {
func GetClusterwideInfo(session pmgo.SessionManager) (*clusterwideInfo, error) {
var databases databases
err := session.Run(bson.M{"listDatabases": 1}, &databases)
if err != nil {
return nil, errors.Wrap(err, "getClusterwideInfo.listDatabases ")
return nil, errors.Wrap(err, "GetClusterwideInfo.listDatabases ")
}
cwi := &clusterwideInfo{
@@ -462,7 +456,7 @@ func sizeAndUnit(size int64) (float64, string) {
return newSize, unit[idx]
}
func getSecuritySettings(session pmgo.SessionManager, ver string) (*security, error) {
func GetSecuritySettings(session pmgo.SessionManager, ver string) (*security, error) {
s := security{
Auth: "disabled",
SSL: "disabled",
@@ -504,22 +498,13 @@ func getSecuritySettings(session pmgo.SessionManager, ver string) (*security, er
isPrivate, err := isPrivateNetwork(strings.TrimSpace(ip))
if !isPrivate && err == nil {
if s.Auth == "enabled" {
s.WarningMsgs = append(
s.WarningMsgs,
fmt.Sprintf("Warning: You might be insecure (bind ip %s is public)", ip),
)
s.WarningMsgs = append(s.WarningMsgs, fmt.Sprintf("Warning: You might be insecure (bind ip %s is public)", ip))
} else {
s.WarningMsgs = append(
s.WarningMsgs,
fmt.Sprintf("Error. You are insecure: bind ip %s is public and auth is disabled", ip),
)
s.WarningMsgs = append(s.WarningMsgs, fmt.Sprintf("Error. You are insecure: bind ip %s is public and auth is disabled", ip))
}
} else {
if ip != "127.0.0.1" && ip != extIP {
s.WarningMsgs = append(
s.WarningMsgs,
fmt.Sprintf("WARNING: You might be insecure. IP binding %s is not localhost", ip),
)
s.WarningMsgs = append(s.WarningMsgs, fmt.Sprintf("WARNING: You might be insecure. IP binding %s is not localhost", ip))
}
}
}
@@ -568,12 +553,12 @@ func getNodeType(session pmgo.SessionManager) (string, error) {
} else if md.Msg == "isdbgrid" {
// isdbgrid is always the msg value when calling isMaster on a mongos
// see http://docs.mongodb.org/manual/core/sharded-cluster-query-router/
return typeMongos, nil
return "mongos", nil
}
return "mongod", nil
}
func getOpCountersStats(session pmgo.SessionManager, count int, sleep time.Duration) (*opCounters, error) {
func GetOpCountersStats(session pmgo.SessionManager, count int, sleep time.Duration) (*opCounters, error) {
oc := &opCounters{}
prevOpCount := &opCounters{}
ss := proto.ServerStatus{}
@@ -709,7 +694,7 @@ func getProcInfo(pid int32, templateData *procInfo) error {
//proc, err := process.NewProcess(templateData.ServerStatus.Pid)
proc, err := process.NewProcess(pid)
if err != nil {
return fmt.Errorf("cannot get process %d", pid)
return fmt.Errorf("cannot get process %d\n", pid)
}
ct, err := proc.CreateTime()
if err != nil {
@@ -865,12 +850,12 @@ func externalIP() (string, error) {
func parseFlags() (*options, error) {
opts := &options{
Host: DefaultHost,
LogLevel: DefaultLogLevel,
RunningOpsSamples: DefaultRunningOpsSamples,
RunningOpsInterval: DefaultRunningOpsInterval, // milliseconds
AuthDB: DefaultAuthDB,
OutputFormat: DefaultOutputFormat,
Host: DEFAULT_HOST,
LogLevel: DEFAULT_LOGLEVEL,
RunningOpsSamples: DEFAULT_RUNNINGOPSSAMPLES,
RunningOpsInterval: DEFAULT_RUNNINGOPSINTERVAL, // milliseconds
AuthDB: DEFAULT_AUTHDB,
OutputFormat: DEFAULT_OUTPUT_FORMAT,
}
gop := getopt.New()
@@ -879,22 +864,17 @@ func parseFlags() (*options, error) {
gop.BoolVarLong(&opts.NoVersionCheck, "no-version-check", 'c', "", "Default: Don't check for updates")
gop.StringVarLong(&opts.User, "username", 'u', "", "Username to use for optional MongoDB authentication")
gop.StringVarLong(&opts.Password, "password", 'p', "", "Password to use for optional MongoDB authentication").
SetOptional()
gop.StringVarLong(&opts.Password, "password", 'p', "", "Password to use for optional MongoDB authentication").SetOptional()
gop.StringVarLong(&opts.AuthDB, "authenticationDatabase", 'a', "admin",
"Database to use for optional MongoDB authentication. Default: admin")
gop.StringVarLong(&opts.LogLevel, "log-level", 'l', "error",
"Log level: panic, fatal, error, warn, info, debug. Default: error")
gop.StringVarLong(&opts.LogLevel, "log-level", 'l', "error", "Log level: panic, fatal, error, warn, info, debug. Default: error")
gop.StringVarLong(&opts.OutputFormat, "output-format", 'f', "text", "Output format: text, json. Default: text")
gop.IntVarLong(&opts.RunningOpsSamples, "running-ops-samples", 's',
fmt.Sprintf("Number of samples to collect for running ops. Default: %d", opts.RunningOpsSamples),
)
fmt.Sprintf("Number of samples to collect for running ops. Default: %d", opts.RunningOpsSamples))
gop.IntVarLong(&opts.RunningOpsInterval, "running-ops-interval", 'i',
fmt.Sprintf("Interval to wait betwwen running ops samples in milliseconds. Default %d milliseconds",
opts.RunningOpsInterval),
)
fmt.Sprintf("Interval to wait betwwen running ops samples in milliseconds. Default %d milliseconds", opts.RunningOpsInterval))
gop.StringVarLong(&opts.SSLCAFile, "sslCAFile", 0, "SSL CA cert file used for authentication")
gop.StringVarLong(&opts.SSLPEMKeyFile, "sslPEMKeyFile", 0, "SSL client PEM file used for authentication")

View File

@@ -29,51 +29,32 @@ func TestGetOpCounterStats(t *testing.T) {
database := pmgomock.NewMockDatabaseManager(ctrl)
ss := proto.ServerStatus{}
if err := tutil.LoadJson("test/sample/serverstatus.json", &ss); err != nil {
t.Fatalf("Cannot load sample file: %s", err)
}
tutil.LoadJson("test/sample/serverstatus.json", &ss)
session.EXPECT().DB("admin").Return(database)
database.EXPECT().Run(bson.D{
{Name: "serverStatus", Value: 1},
{Name: "recordStats", Value: 1},
}, gomock.Any()).SetArg(1, ss)
database.EXPECT().Run(bson.D{{"serverStatus", 1}, {"recordStats", 1}}, gomock.Any()).SetArg(1, ss)
session.EXPECT().DB("admin").Return(database)
database.EXPECT().Run(bson.D{
{Name: "serverStatus", Value: 1},
{Name: "recordStats", Value: 1},
}, gomock.Any()).SetArg(1, ss)
database.EXPECT().Run(bson.D{{"serverStatus", 1}, {"recordStats", 1}}, gomock.Any()).SetArg(1, ss)
session.EXPECT().DB("admin").Return(database)
database.EXPECT().Run(bson.D{
{Name: "serverStatus", Value: 1},
{Name: "recordStats", Value: 1},
}, gomock.Any()).SetArg(1, ss)
database.EXPECT().Run(bson.D{{"serverStatus", 1}, {"recordStats", 1}}, gomock.Any()).SetArg(1, ss)
session.EXPECT().DB("admin").Return(database)
database.EXPECT().Run(bson.D{
{Name: "serverStatus", Value: 1},
{Name: "recordStats", Value: 1},
}, gomock.Any()).SetArg(1, ss)
database.EXPECT().Run(bson.D{{"serverStatus", 1}, {"recordStats", 1}}, gomock.Any()).SetArg(1, ss)
session.EXPECT().DB("admin").Return(database)
database.EXPECT().Run(bson.D{
{Name: "serverStatus", Value: 1},
{Name: "recordStats", Value: 1},
}, gomock.Any()).SetArg(1, ss)
database.EXPECT().Run(bson.D{{"serverStatus", 1}, {"recordStats", 1}}, gomock.Any()).SetArg(1, ss)
ss = addToCounters(ss, 1)
session.EXPECT().DB("admin").Return(database)
database.EXPECT().Run(bson.D{
{Name: "serverStatus", Value: 1}, {Name: "recordStats", Value: 1},
}, gomock.Any()).SetArg(1, ss)
database.EXPECT().Run(bson.D{{"serverStatus", 1}, {"recordStats", 1}}, gomock.Any()).SetArg(1, ss)
sampleCount := 5
sampleRate := 10 * time.Millisecond // in seconds
var sampleCount int = 5
var sampleRate time.Duration = 10 * time.Millisecond // in seconds
expect := TimedStats{Min: 0, Max: 0, Total: 0, Avg: 0}
os, err := getOpCountersStats(session, sampleCount, sampleRate)
os, err := GetOpCountersStats(session, sampleCount, sampleRate)
if err != nil {
t.Error(err)
}
@@ -86,7 +67,7 @@ func TestGetOpCounterStats(t *testing.T) {
func TestSecurityOpts(t *testing.T) {
cmdopts := []proto.CommandLineOptions{
// 1
{
proto.CommandLineOptions{
Parsed: proto.Parsed{
Net: proto.Net{
SSL: proto.SSL{
@@ -100,7 +81,7 @@ func TestSecurityOpts(t *testing.T) {
},
},
// 2
{
proto.CommandLineOptions{
Parsed: proto.Parsed{
Net: proto.Net{
SSL: proto.SSL{
@@ -114,7 +95,7 @@ func TestSecurityOpts(t *testing.T) {
},
},
// 3
{
proto.CommandLineOptions{
Parsed: proto.Parsed{
Net: proto.Net{
SSL: proto.SSL{
@@ -128,7 +109,7 @@ func TestSecurityOpts(t *testing.T) {
},
},
// 4
{
proto.CommandLineOptions{
Parsed: proto.Parsed{
Net: proto.Net{
SSL: proto.SSL{
@@ -142,7 +123,7 @@ func TestSecurityOpts(t *testing.T) {
},
},
// 5
{
proto.CommandLineOptions{
Parsed: proto.Parsed{
Net: proto.Net{
SSL: proto.SSL{
@@ -162,7 +143,7 @@ func TestSecurityOpts(t *testing.T) {
expect := []*security{
// 1
{
&security{
Users: 1,
Roles: 2,
Auth: "disabled",
@@ -172,7 +153,7 @@ func TestSecurityOpts(t *testing.T) {
WarningMsgs: nil,
},
// 2
{
&security{
Users: 1,
Roles: 2,
Auth: "enabled",
@@ -181,7 +162,7 @@ func TestSecurityOpts(t *testing.T) {
WarningMsgs: nil,
},
// 3
{
&security{
Users: 1,
Roles: 2,
Auth: "enabled",
@@ -191,7 +172,7 @@ func TestSecurityOpts(t *testing.T) {
WarningMsgs: nil,
},
// 4
{
&security{
Users: 1,
Roles: 2,
Auth: "disabled",
@@ -201,7 +182,7 @@ func TestSecurityOpts(t *testing.T) {
WarningMsgs: nil,
},
// 5
{
&security{
Users: 1,
Roles: 2,
Auth: "enabled",
@@ -223,9 +204,7 @@ func TestSecurityOpts(t *testing.T) {
for i, cmd := range cmdopts {
session.EXPECT().DB("admin").Return(database)
database.EXPECT().Run(bson.D{
{Name: "getCmdLineOpts", Value: 1}, {Name: "recordStats", Value: 1},
}, gomock.Any()).SetArg(1, cmd)
database.EXPECT().Run(bson.D{{"getCmdLineOpts", 1}, {"recordStats", 1}}, gomock.Any()).SetArg(1, cmd)
session.EXPECT().Clone().Return(session)
session.EXPECT().SetMode(mgo.Strong, true)
@@ -239,7 +218,7 @@ func TestSecurityOpts(t *testing.T) {
rolesCol.EXPECT().Count().Return(2, nil)
session.EXPECT().Close().Return()
got, err := getSecuritySettings(session, "3.2")
got, err := GetSecuritySettings(session, "3.2")
if err != nil {
t.Errorf("cannot get sec settings: %v", err)
@@ -347,9 +326,7 @@ func TestGetChunks(t *testing.T) {
col := pmgomock.NewMockCollectionManager(ctrl)
var res []proto.ChunksByCollection
if err := tutil.LoadJson("test/sample/chunks.json", &res); err != nil {
t.Errorf("Cannot load samples file: %s", err)
}
tutil.LoadJson("test/sample/chunks.json", &res)
pipe.EXPECT().All(gomock.Any()).SetArg(0, res)
@@ -379,12 +356,10 @@ func TestIntegrationGetChunks(t *testing.T) {
server.SetPath(tempDir)
session := pmgo.NewSessionManager(server.Session())
if err := session.DB("config").C("chunks").Insert(bson.M{"ns": "samples.col1", "count": 2}); err != nil {
t.Errorf("Cannot insert sample data: %s", err)
}
session.DB("config").C("chunks").Insert(bson.M{"ns": "samples.col1", "count": 2})
want := []proto.ChunksByCollection{
{
proto.ChunksByCollection{
ID: "samples.col1",
Count: 1,
},
@@ -398,9 +373,7 @@ func TestIntegrationGetChunks(t *testing.T) {
t.Errorf("Invalid integration chunks count.\ngot: %+v\nwant: %+v", got, want)
}
if err := server.Session().DB("config").DropDatabase(); err != nil {
t.Logf("Cannot drop config database (cleanup): %s", err)
}
server.Session().DB("config").DropDatabase()
server.Session().Close()
server.Stop()
@@ -424,11 +397,11 @@ func TestParseArgs(t *testing.T) {
{
args: []string{TOOLNAME}, // arg[0] is the command itself
want: &options{
Host: DefaultHost,
LogLevel: DefaultLogLevel,
AuthDB: DefaultAuthDB,
RunningOpsSamples: DefaultRunningOpsSamples,
RunningOpsInterval: DefaultRunningOpsInterval,
Host: DEFAULT_HOST,
LogLevel: DEFAULT_LOGLEVEL,
AuthDB: DEFAULT_AUTHDB,
RunningOpsSamples: DEFAULT_RUNNINGOPSSAMPLES,
RunningOpsInterval: DEFAULT_RUNNINGOPSINTERVAL,
OutputFormat: "text",
},
},

View File

@@ -28,7 +28,6 @@
"Max": 1061230,
"Avg": 1061230,
"Pct95": 1061230,
"Pct99": 1061230,
"StdDev": 0,
"Median": 1061230
},
@@ -39,7 +38,6 @@
"Max": 75,
"Avg": 75,
"Pct95": 75,
"Pct99": 75,
"StdDev": 0,
"Median": 75
},
@@ -50,7 +48,6 @@
"Max": 75,
"Avg": 75,
"Pct95": 75,
"Pct99": 75,
"StdDev": 0,
"Median": 75
}
@@ -74,7 +71,6 @@
"Max": 7,
"Avg": 7,
"Pct95": 7,
"Pct99": 7,
"StdDev": 0,
"Median": 7
},
@@ -85,7 +81,6 @@
"Max": 215,
"Avg": 215,
"Pct95": 215,
"Pct99": 215,
"StdDev": 0,
"Median": 215
},
@@ -106,7 +101,6 @@
"Max": 10000,
"Avg": 10000,
"Pct95": 10000,
"Pct99": 10000,
"StdDev": 0,
"Median": 10000
}
@@ -140,7 +134,6 @@
"Max": 215,
"Avg": 215,
"Pct95": 215,
"Pct99": 215,
"StdDev": 0,
"Median": 215
},
@@ -165,4 +158,4 @@
"Median": 0
}
}
]
]

View File

@@ -17,7 +17,6 @@
"Max": 7,
"Avg": 2.3333333333333335,
"Pct95": 7,
"Pct99": 7,
"StdDev": 3.2998316455372216,
"Median": 0
},
@@ -28,7 +27,6 @@
"Max": 1061230,
"Avg": 353886.6666666667,
"Pct95": 1061230,
"Pct99": 1061230,
"StdDev": 500167.26762709644,
"Median": 215
},
@@ -39,7 +37,6 @@
"Max": 75,
"Avg": 25,
"Pct95": 75,
"Pct99": 75,
"StdDev": 35.35533905932738,
"Median": 0
},
@@ -50,8 +47,7 @@
"Max": 10000,
"Avg": 3358.3333333333335,
"Pct95": 10000,
"Pct99": 10000,
"StdDev": 4696.46734850308,
"Median": 75
}
}
}

View File

@@ -1,113 +0,0 @@
#!/usr/bin/env perl
BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $master_dbh = $sb->get_dbh_for('master');
my $slave_dbh = $sb->get_dbh_for('slave1');
if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
elsif ( !$slave_dbh ) {
plan skip_all => 'Cannot connect to sandbox slave';
} elsif ($sandbox_version lt '5.7') {
plan skip_all => 'Only on MySQL 5.7+';
} else {
plan tests => 5;
}
my ($master1_dbh, $master1_dsn) = $sb->start_sandbox(
server => 'chan_master1',
type => 'master',
);
my ($master2_dbh, $master2_dsn) = $sb->start_sandbox(
server => 'chan_master2',
type => 'master',
);
my ($slave1_dbh, $slave1_dsn) = $sb->start_sandbox(
server => 'chan_slave1',
type => 'master',
);
my $slave1_port = $sb->port_for('chan_slave1');
$sb->load_file('chan_master1', "sandbox/gtid_on.sql", undef, no_wait => 1);
$sb->load_file('chan_master2', "sandbox/gtid_on.sql", undef, no_wait => 1);
$sb->load_file('chan_slave1', "sandbox/slave_channels.sql", undef, no_wait => 1);
my $master1_port = $sb->port_for('chan_master1');
my $num_rows = 40000;
# Load some rows into masters 1 & 2.
$sb->load_file('chan_master1', "t/pt-archiver/samples/channels.sql", undef, no_wait => 1);
diag("Loading $num_rows into the test.t1 table on first master. This might take some time.");
diag(`util/mysql_random_data_load --host=127.0.0.1 --port=$master1_port --user=msandbox --password=msandbox test t1 $num_rows`);
diag("$num_rows rows loaded. Starting tests.");
$master_dbh->do("FLUSH TABLES");
my $rows = $master1_dbh->selectrow_arrayref('SELECT COUNT(*) FROM test.t1 ');
is(
@$rows[0],
$num_rows,
"All rows were loaded into master 1",
);
my @args = ('--source', $master1_dsn.',D=test,t=t1', '--purge', '--where', sprintf('id >= %d', $num_rows / 2), '--check-slave-lag', $slave1_dsn);
my ($exit_status, $output);
$output = output(
sub { $exit_status = pt_archiver::main(@args) },
stderr => 1,
);
isnt(
$exit_status,
0,
'Must specify a channel name',
);
like (
$output,
qr/"channel" was not specified/,
'Message saying channel name must be specified'
);
push @args, ('--channel', 'masterchan1');
output(
sub { $exit_status = pt_archiver::main(@args, '--channel', 'masterchan1') },
stderr => 1,
);
is(
$exit_status,
0,
'Ok if channel name was specified',
);
$sb->stop_sandbox(qw(chan_master1 chan_master2 chan_slave1));
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($master_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
exit;

View File

@@ -38,7 +38,6 @@ my $cmd = "perl -I $trunk/t/pt-archiver/samples $trunk/bin/pt-archiver";
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
$dbh->do('use dm');
#1
is_deeply(
$dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -50,7 +49,7 @@ is_deeply(
],
'main_table-123 data before archiving'
);
#2
is_deeply(
$dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
@@ -68,10 +67,6 @@ is_deeply(
`$cmd --purge --primary-key-only --source F=$cnf,D=dm,t=main_table-123,i=pub_date,b=1,m=delete_more --where "pub_date < '2010-02-16'" --bulk-delete --limit 2`;
# PT-157 has changed the program behavior when using --primary-key-only even if an index was
# specified in the DSN
#3
is_deeply(
$dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -79,19 +74,17 @@ is_deeply(
# [2, '2010-02-15', 'b'],
# [3, '2010-02-15', 'c'],
[4, '2010-02-16', 'd'],
[5, '2010-02-14', 'e'],
# [5, '2010-02-14', 'e'],
],
'main_table-123 data after archiving (limit 2)'
);
#4
is_deeply(
$dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
[1, 'a'],
[4, 'd'],
[5, 'e'],
[6, 'ot1']
[6, 'ot1'],
],
'other_table-123 data after archiving (limit 2)'
);
@@ -99,7 +92,6 @@ is_deeply(
SKIP: {
skip 'Cannot connect to slave sandbox', 6 unless $slave_dbh;
$slave_dbh->do('use dm');
#5
is_deeply(
$slave_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -111,7 +103,7 @@ SKIP: {
],
'Slave main_table-123 not changed'
);
#6
is_deeply(
$slave_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
@@ -130,7 +122,6 @@ SKIP: {
# Run it again without DSN b so changes should be made on slave.
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
#7
is_deeply(
$slave_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -142,7 +133,7 @@ SKIP: {
],
'Reset slave main_table-123'
);
#8
is_deeply(
$slave_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
@@ -160,7 +151,7 @@ SKIP: {
`$cmd --purge --primary-key-only --source F=$cnf,D=dm,t=main_table-123,i=pub_date,m=delete_more --where "pub_date < '2010-02-16'" --bulk-delete --limit 2`;
sleep 1;
#9
is_deeply(
$slave_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -168,18 +159,16 @@ SKIP: {
# [2, '2010-02-15', 'b'],
# [3, '2010-02-15', 'c'],
[4, '2010-02-16', 'd'],
[5, '2010-02-14', 'e'],
# [5, '2010-02-14', 'e'],
],
'Slave main_table-123 changed'
);
#10
is_deeply(
$slave_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
[1, 'a'],
[4, 'd'],
[5, 'e'],
[6, 'ot1'],
],
'Slave other_table-123 changed'
@@ -191,7 +180,7 @@ SKIP: {
# ###########################################################################
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
$dbh->do('use dm');
#11
is_deeply(
$dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -203,7 +192,7 @@ is_deeply(
],
'main_table-123 data before archiving'
);
#12
is_deeply(
$dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
@@ -220,7 +209,7 @@ is_deeply(
);
`$cmd --purge --primary-key-only --source F=$cnf,D=dm,t=main_table-123,i=pub_date,b=1,m=delete_more --where "pub_date < '2010-02-16'" --bulk-delete --limit 100`;
#13
is_deeply(
$dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -228,17 +217,16 @@ is_deeply(
# [2, '2010-02-15', 'b'],
# [3, '2010-02-15', 'c'],
[4, '2010-02-16', 'd'],
[5, '2010-02-14', 'e'],
# [5, '2010-02-14', 'e'],
],
'main_table-123 data after archiving (limit 100)'
);
#14
is_deeply(
$dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
[1, 'a'],
[4, 'd'],
[5, 'e'],
[6, 'ot1'],
],
'other_table-123 data after archiving (limit 100)'
@@ -249,7 +237,7 @@ is_deeply(
# ###########################################################################
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
$dbh->do('use dm');
#15
is_deeply(
$dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -261,7 +249,7 @@ is_deeply(
],
'main_table-123 data before archiving'
);
#16
is_deeply(
$dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
@@ -279,7 +267,7 @@ is_deeply(
`$cmd --purge --primary-key-only --source F=$cnf,D=dm,t=main_table-123,i=pub_date,b=1,m=delete_more --where "pub_date < '2010-02-16'"`;
`$cmd --purge --primary-key-only --source F=$cnf,D=dm,t=main_table-123,i=pub_date,b=1,m=delete_more --where "pub_date < '2010-02-16'"`;
#17
is_deeply(
$dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
@@ -287,7 +275,7 @@ is_deeply(
# [2, '2010-02-15', 'b'],
# [3, '2010-02-15', 'c'],
[4, '2010-02-16', 'd'],
[5, '2010-02-14', 'e'],
# [5, '2010-02-14', 'e'],
],
'main_table-123 data after archiving (single delete)'
);
@@ -297,7 +285,6 @@ is_deeply(
[
[1, 'a'],
[4, 'd'],
[5, 'e'],
[6, 'ot1'],
],
'other_table-123 data after archiving (single delete)'
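The expected row sets above follow from how the tool is invoked: --bulk-delete makes pt-archiver remove each chunk with a single range DELETE rather than one DELETE per row, and with --primary-key-only the rows are identified by the `id` primary key even though the DSN names the pub_date index (the PT-157 behavior the comments refer to). A rough sketch of what one such chunk amounts to, with purely illustrative boundary values rather than the literal SQL the tool emits:

# Rough shape of one --bulk-delete chunk (boundary values are illustrative).
# The first and last rows of the chunk are located first, then the whole
# chunk is removed with a single range DELETE.
$dbh->do(q{
    DELETE FROM `dm`.`main_table-123`
     WHERE (id >= 1) AND (id <= 2)          -- chunk boundaries, --limit 2
       AND (pub_date < '2010-02-16')        -- the --where condition
     LIMIT 2
});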

View File

@@ -23,7 +23,7 @@ if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
else {
plan tests => 8;
plan tests => 7;
}
my $output;
@@ -38,10 +38,8 @@ $sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "D=test,t=table_1,F=$cnf", qw(--purge)) },
);
#1
is($output, '', 'Basic test run did not die');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
#2
is($output + 0, 0, 'Purged ok');
# Test basic functionality with --commit-each
@@ -49,10 +47,8 @@ $sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "D=test,t=table_1,F=$cnf", qw(--commit-each --limit 1 --purge)) },
);
#3
is($output, '', 'Commit-each did not die');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
#4
is($output + 0, 0, 'Purged ok with --commit-each');
# Archive only part of the table
@@ -60,20 +56,10 @@ $sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "D=test,t=table_1,F=$cnf", qw(--where a<4 --purge)) },
);
#5
is($output, '', 'No output for archiving only part of a table');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
#6
is($output + 0, 1, 'Purged some rows ok');
# Fail if --primary-key-only was specified and there is no PK in the table
$sb->load_file('master', 't/pt-archiver/samples/pt_157.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "D=test,t=t1,F=$cnf", qw(--purge --primary-key-only)) },
stderr => 1,
);
like($output, qr/does not have a PRIMARY KEY/, 'Fail if --primary-key-only was specified and there is no PK');
# #############################################################################
# Done.
# #############################################################################
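The last hunk above checks that pt-archiver refuses --primary-key-only when the table has no primary key, matching the error text "does not have a PRIMARY KEY". The guard boils down to a key lookup along these lines (a sketch only; the tool itself performs this check through its table-parsing code, and the table name here is the one from pt_157.sql):

# Sketch: refuse --primary-key-only when the source table has no PRIMARY KEY.
my $keys = $dbh->selectall_arrayref(
    "SHOW KEYS FROM `test`.`t1` WHERE Key_name = 'PRIMARY'");
if ( !@$keys ) {
    die "--primary-key-only was specified but table `test`.`t1` "
      . "does not have a PRIMARY KEY\n";
}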

View File

@@ -1,6 +0,0 @@
DROP DATABASE IF EXISTS test;
CREATE DATABASE test;
CREATE TABLE test.t1 (
id INT PRIMARY KEY AUTO_INCREMENT,
f2 VARCHAR(25)
) Engine=InnoDB;

View File

@@ -1,20 +0,0 @@
DROP DATABASE IF EXISTS test;
CREATE DATABASE test;
CREATE TABLE `test`.`t1` (
f1 INT NOT NULL,
f2 VARCHAR(10)
) Engine=InnoDB;
INSERT INTO `test`.`t1` VALUES
(1, 'a'),
(2, 'b'),
(3, 'c'),
(4, 'd'),
(5, 'e'),
(6, 'f'),
(7, 'g'),
(8, 'h'),
(9, 'i'),
(10, 'j');

View File

@@ -1,105 +0,0 @@
#!/usr/bin/env perl
BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use PerconaTest;
use Sandbox;
use SqlModes;
require "$trunk/bin/pt-table-checksum";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
else {
plan tests => 4;
}
diag("loading samples");
$sb->load_file('master', 't/pt-table-checksum/samples/pt-1616.sql');
my $num_rows = 50000;
diag("Loading $num_rows rows into the table. This might take some time.");
# diag(`util/mysql_random_data_load --host=127.0.0.1 --port=12345 --user=msandbox --password=msandbox junk pt_test_100 $num_rows`);
my $sql = "INSERT INTO junk.pt_test_100 (id1, id2) VALUES (?, ?)";
my $sth = $dbh->prepare($sql);
my @chars = ("A".."Z", "a".."z");
# Generate some random data having commas
for (my $i=0; $i < $num_rows; $i++) {
# Generate random strings having commas
my ($id1, $id2) = (",,,,", ",,,,");
$id1 .= $chars[rand @chars] for 1..10;
$id2 .= $chars[rand @chars] for 1..10;
$sth->execute($id1, $id2);
}
$sth->finish();
$dbh->do('INSERT INTO junk.pt_test_100 (id1, id2) VALUES(UNHEX("F96DD7"), UNHEX("F96DD7"))');
# The sandbox servers run with lock_wait_timeout=3 and it's not dynamic
# so we need to specify --set-vars innodb_lock_wait_timeout=50 or the tool will die.
my $master_dsn = $sb->dsn_for('master');
my @args = ($master_dsn, "--set-vars", "innodb_lock_wait_timeout=50",
"--ignore-databases", "mysql", "--no-check-binlog-format",
"--chunk-size", "1",
"--empty-replicate-table", "--run-time", "2s"
);
my $output;
my $exit_status;
$output = output(
sub { $exit_status = pt_table_checksum::main(@args) },
stderr => 1,
);
is(
$exit_status,
0,
"PT-1616 pt-table-cheksum before --resume with binary fields exit status",
);
@args = ("--set-vars", "innodb_lock_wait_timeout=50",
"--ignore-databases", "mysql", "--no-check-binlog-format",
"--chunk-size", "1",
"--resume", "--run-time", "5s", $master_dsn
);
$output = output(
sub { $exit_status = pt_table_checksum::main(@args) },
stderr => 1,
);
is(
$exit_status,
0,
"PT-1616 pt-table-cheksum --resume with binary fields exit status",
);
unlike(
$output,
qr/called with \d+ bind variables when \d+ are needed/,
"PT-1616 pt-table-cheksum --resume parameters binding error",
) or die($output);
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
exit;
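The deleted test above guards PT-1616: when --resume reloads chunk boundaries that contain commas or raw binary bytes, the values must not be split or interpolated naively, or execute() ends up being called with the wrong number of bind variables (the error the unlike() above matches). A minimal DBI sketch of the safe pattern, with made-up boundary values in the spirit of the data generated above:

# Safe pattern: one placeholder per boundary column and the values bound
# verbatim, so commas or binary bytes inside a value cannot change how
# many parameters the statement expects.
my ($lo1, $lo2) = (',,,,AbCdEfGhIj', pack('H*', 'F96DD7'));
my $sth = $dbh->prepare(q{
    SELECT id1, id2
      FROM junk.pt_test_100
     WHERE (id1, id2) > (?, ?)
     ORDER BY id1, id2
     LIMIT 10
});
$sth->execute($lo1, $lo2);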

View File

@@ -1,90 +0,0 @@
#!/usr/bin/env perl
BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use PerconaTest;
use Sandbox;
use SqlModes;
require "$trunk/bin/pt-table-checksum";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
diag ('Starting second sandbox master');
my ($master1_dbh, $master1_dsn) = $sb->start_sandbox(
server => 'chan_master1',
type => 'master',
);
diag ('Starting second sandbox slave 1');
my ($slave1_dbh, $slave1_dsn) = $sb->start_sandbox(
server => 'chan_slave1',
type => 'slave',
master => 'chan_master1',
);
diag ('Starting second sandbox slave 2');
my ($slave2_dbh, $slave2_dsn) = $sb->start_sandbox(
server => 'chan_slave2',
type => 'slave',
master => 'chan_master1',
);
my $dbh = $sb->get_dbh_for('master');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
else {
plan tests => 2;
}
diag("loading samples");
$sb->load_file('chan_master1', 't/pt-table-checksum/samples/pt-1637.sql');
my @args = ($master1_dsn,
"--set-vars", "innodb_lock_wait_timeout=50",
"--ignore-databases", "mysql", "--no-check-binlog-format",
"--recursion-method", "dsn=h=127.0.0.1,D=test,t=dsns",
"--run-time", "5", "--fail-on-stopped-replication",
);
# The sandbox servers run with lock_wait_timeout=3 and it's not dynamic
# so we need to specify --set-vars innodb_lock_wait_timeout=50 or the tool will die.
$sb->do_as_root("chan_slave1", 'stop slave IO_thread;');
my $output;
my $exit_status;
($output, $exit_status) = full_output(
sub { $exit_status = pt_table_checksum::main(@args) },
stderr => 1,
);
is(
$exit_status,
128,
"PT-1637 exist status 128 if replication is stopped and --fail-on-replication-stopped",
);
$sb->do_as_root("chan_slave1", 'start slave IO_thread;');
sleep(2);
$sb->stop_sandbox(qw(chan_master1 chan_slave2 chan_slave1));
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
exit;
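The deleted test stops the IO thread on one replica and expects pt-table-checksum to exit with status 128 when --fail-on-stopped-replication is given. Conceptually the condition is just what SHOW SLAVE STATUS reports on each replica; a sketch under that assumption, using a hypothetical $slave_dbh handle to the replica (the tool's own detection code may differ):

# Sketch: treat a stopped IO or SQL thread on a replica as fatal.
my $status = $slave_dbh->selectrow_hashref('SHOW SLAVE STATUS');
if ( $status
     && ( ($status->{Slave_IO_Running}  || '') ne 'Yes'
       || ($status->{Slave_SQL_Running} || '') ne 'Yes' ) ) {
    warn "Replication is stopped on this replica\n";
    exit 128;
}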

View File

@@ -1,30 +0,0 @@
DROP DATABASE IF EXISTS junk;
CREATE DATABASE junk;
CREATE TABLE junk.pt_test_100 (
id1 binary(16),
id2 binary(16),
PRIMARY KEY(id1, id2)
);
CREATE DATABASE IF NOT EXISTS percona;
CREATE TABLE IF NOT EXISTS percona.checksums (
db CHAR(64) NOT NULL,
tbl CHAR(64) NOT NULL,
chunk INT NOT NULL,
chunk_time FLOAT NULL,
chunk_index VARCHAR(200) NULL,
lower_boundary TEXT NULL,
upper_boundary TEXT NULL,
this_crc CHAR(40) NOT NULL,
this_cnt INT NOT NULL,
master_crc CHAR(40) NULL,
master_cnt INT NULL,
ts TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (db, tbl, chunk),
INDEX ts_db_tbl (ts, db, tbl)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
TRUNCATE TABLE percona.checksums;
ALTER TABLE percona.checksums MODIFY upper_boundary BLOB, MODIFY lower_boundary BLOB;
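The trailing ALTER switches the boundary columns from TEXT to BLOB, presumably so the binary id1/id2 boundary values exercised by pt-1616.t survive the round trip through percona.checksums without character-set conversion. A quick way to confirm the column types from a test (a convenience check, not something the test above relies on):

# Confirm the boundary columns are binary-safe before a --resume run.
my $cols = $dbh->selectall_arrayref(q{
    SELECT COLUMN_NAME, DATA_TYPE
      FROM information_schema.COLUMNS
     WHERE TABLE_SCHEMA = 'percona'
       AND TABLE_NAME   = 'checksums'
       AND COLUMN_NAME IN ('lower_boundary', 'upper_boundary')
});
print "$_->[0]: $_->[1]\n" for @$cols;   # expect: blob for both columns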

View File

@@ -1,37 +0,0 @@
CREATE DATABASE IF NOT EXISTS `percona`;
CREATE TABLE `percona`.`checksums` (
db CHAR(64) NOT NULL,
tbl CHAR(64) NOT NULL,
chunk INT NOT NULL,
chunk_time FLOAT NULL,
chunk_index VARCHAR(200) NULL,
lower_boundary TEXT NULL,
upper_boundary TEXT NULL,
this_crc CHAR(40) NOT NULL,
this_cnt INT NOT NULL,
master_crc CHAR(40) NULL,
master_cnt INT NULL,
ts TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (db, tbl, chunk),
INDEX ts_db_tbl (ts, db, tbl)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
CREATE DATABASE IF NOT EXISTS test;
CREATE TABLE `test`.`dsns` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`parent_id` int(11) DEFAULT NULL,
`dsn` varchar(255) NOT NULL,
PRIMARY KEY (`id`)
);
-- From Sandbox.pm
-- chan_master1 => 2900,
-- chan_master2 => 2901,
-- chan_slave1 => 2902,
-- chan_slave2 => 2903,
INSERT INTO `test`.`dsns` VALUES
(1, NULL, "h=127.0.0.1,P=2902,u=msandbox,p=msandbox"),
(2, NULL, "h=127.0.0.1,P=2903,u=msandbox,p=msandbox");

Binary file not shown.