Mirror of https://github.com/percona/percona-toolkit.git

Compare commits: snyk-fix-2...PT-2457-pt (90 commits)
Commit SHA1s:

06d5966106  d5d39a83fa  6fc8f66792  71c8d866a5  6cff1dfe77  9b83a7dc74
64157c1fad  0c7e5d749c  b3bf684f39  6043df2ce4  9726e2bfc6  3d9d7acccb
37779c4364  6e28207e89  528007e2b0  d5f091cb9c  8b4065c4d3  42f74af5ba
14eff5f1df  46f5a2d220  edef468f8e  fa709c9064  11e4588a73  a63e3212f0
2b2289f9f5  dc4aee8dfd  b1279680fd  21b7edea4f  f31ea35421  554d8294e1
7ad2e73e5d  a0a1e1decf  55f2167ed0  6e7a867fed  fd2f0f94b1  2201e7f97d
2383a399a6  a66fe3ebf6  d81adddfa4  84095fd7d7  47b8c5b067  7717cfe4f1
b98ec7baa7  f9d9a993e9  7f322f7cbd  3ff98c20bc  f816053065  34a14ec77e
d2db8f5789  e940d154c0  ac53883f29  808c590e7a  c09b622c3e  ef2dbd887f
2df1bd8950  ebacadf098  c49c58db2b  9711db87a7  905490bac5  e964e17f21
c3a201d5f8  2474b1f45b  88367c1dea  840ba6926b  25f4ee6d80  c83d2f547d
e4cecc3e69  f9ea94f195  c92d95bc38  6b449ec081  af7bd8abd6  6fad1f0ff0
f4a324581a  3cb46e61f7  2198763042  4bf48d864f  16f5aac023  69cbfca27f
14623c5dce  71ffb19e9e  5c16d37020  f70f8084dd  5474f5d5ff  d0f8fb231b
8b61618d35  9e9f7434d1  888af5f5ef  dc77289d60  d5ec5d9ca8  bcbb4e59ab
.github/workflows/toolkit.yml (vendored, 4 changes)
@@ -27,7 +27,7 @@ jobs:
     - name: Build the Docker image
       run: echo "FROM oraclelinux:9-slim" > Dockerfile; echo "RUN microdnf -y update" >> Dockerfile; echo "COPY bin/* /usr/bin/" >> Dockerfile; docker build . --file Dockerfile --tag percona-toolkit:${{ github.sha }}
     - name: Run Trivy vulnerability scanner
-      uses: aquasecurity/trivy-action@0.29.0
+      uses: aquasecurity/trivy-action@0.31.0
       with:
         image-ref: 'percona-toolkit:${{ github.sha }}'
         format: 'table'
@@ -36,7 +36,7 @@ jobs:
         vuln-type: 'os,library'
         severity: 'CRITICAL,HIGH'
     - name: Upload a Build Artifact
-      uses: actions/upload-artifact@v4.6.1
+      uses: actions/upload-artifact@v4.6.2
      with:
        name: binaries
        path: bin/*
@@ -39,6 +39,9 @@ extend-ignore-re = [
 "END_ND_TOOLTIPS" = "END_ND_TOOLTIPS"
 "EXPLAINed" = "EXPLAINed"
 "FH_ND_FILE" = "FH_ND_FILE"
+"GTI" = "GTI"
+"GTID" = "GTID"
+"GTIDs" = "GTIDs"
 "INSERTs" = "INSERTs"
 "IST" = "IST"
 "istError" = "istError"
Changelog (19 changes)
@@ -1,5 +1,21 @@
 Changelog for Percona Toolkit

+v3.7.0-2 released 2025-05-14
+
+This release addresses multiple security vulnerabilities reported in Percona Toolkit version 3.7.0, including issues related to the `libxml2` component (CVE-2024-56171, CVE-2025-24928), `openssl` (CVE-2024-12797), and `krb5` (CVE-2022-37967).
+
+* Fixed bug PT-2442: percona-toolkit:latest Vulnerability [CVE-2024-56171 CVE-2024-12797 CVE-2022-37967 CVE-2025-24928]
+* Fixed bug PT-2375: pt-table-sync does not work with generated columns (Thanks to Henning Pöttker for the contribution)
+* Fixed bug PT-2400: pt-table-checksum reports the error for warning code 4164
+* Fixed bug PT-2377: pt-table-sync does not handle utf8 strings in JSON columns correctly (Thanks to Henning Pöttker for the contribution)
+* Fixed bug PT-2378: pt-table-sync does print floating point numbers in SQL statements with insufficient precision (Thanks to Henning Pöttker for the contribution)
+* Fixed bug PT-2389: pt-online-schema-change resume functionality doesn't work with ADD/DROP column (Thanks to Perry Harrington for the contribution)
+* Fixed bug PT-2410: pt-archiver brokes when using output-format=csv and has null values (Thanks to Roberto de Bem for the contribution)
+* Fixed bug PT-2422: pt-online-schema-change race overwrites new_table_name (Thanks to Perry Harrington for the contribution)
+* Fixed bug PT-2407: pt-online-schema-change exit status(return code) is 0 even if it does NOT succeed
+* Fixed bug PT-2355: pt-online-schema-change should not resume a job with empty boundaries
+* Fixed bug PT-1577: pt-secure-collect, replace pt-secure-data in Usage section
+
 v3.7.0 released 2024-12-23

 * Feature PT-2340: Support MySQL 8.4
@@ -18,8 +34,7 @@ v3.6.0 released 2024-06-12
 * Improvement PR-160: added support for operf if present, and if CMD_OPCONTROL is not set (Thanks to Fernando Ipar for the contribution)
 * Improvement PR-140: Add CPU cache configuration info to pt-summary (Thanks to Alexey Kopytov for the contribution)
 * Improvement PR-765: pt-galera-log-explainer: improvements from feedbacks (Thanks Yoann La Cancellera for the contribution)
-* Improvement PT-2233: pt-k8s-debug-collector needs psql in the host node
-  (Thanks to Jobin Augustine for the contribution)
+* Improvement PT-2233: pt-k8s-debug-collector needs psql in the host node (Thanks to Jobin Augustine for the contribution)
 * Improvement PR-772: pt-galera-log-explainer: add custom regexes parameter (Thanks Yoann La Cancellera for the contribution)
 * Improvement PT-2301: pt-galera-log-explainer: whois redesign (Thanks Yoann La Cancellera for the contribution)
 * Improvement PT-2190: pt-show-grants should use print_identified_with_as_hex
@@ -11,7 +11,7 @@ MAKE_GOTOOLS

 WriteMakefile(
    NAME      => 'Percona::Toolkit',
-   VERSION   => '3.7.0',
+   VERSION   => '3.7.0-2',
    EXE_FILES => [
       map {
          (my $name = $_) =~ s/^bin.//;
@@ -5761,7 +5761,7 @@ sub version_check {
       PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
    }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
      eval {
         update_check_times(
            instances => $instances_to_check,
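The guard added in this hunk matters because dereferencing an undefined array reference dies under `use strict`; checking the reference for truth first short-circuits safely. A minimal sketch of the failure mode and the fix, in plain core Perl rather than the tool's own code:

    #!/usr/bin/env perl
    use strict;
    use warnings;

    my $instances_to_check;   # may legitimately be undef when no instances qualify

    # Old form: dies with "Can't use an undefined value as an ARRAY reference".
    # if ( @$instances_to_check ) { ... }

    # Fixed form: the truth check on the reference runs first and
    # short-circuits, so the dereference never sees undef.
    if ( $instances_to_check and @$instances_to_check ) {
        print "would call update_check_times()\n";
    }
    else {
        print "nothing to check\n";
    }

The same guard is applied to every tool that embeds this version-check code, which is why this hunk repeats below with different line numbers.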
@@ -6969,7 +6969,7 @@ sub main {
      warn "Invalid output format:". $o->get('format');
      warn "Using default 'dump' format";
   } elsif ($o->get('output-format') || '' eq 'csv') {
-      $fields_separated_by = ", ";
+      $fields_separated_by = ",";
      $optionally_enclosed_by = '"';
   }
   my $need_hdr = $o->get('header') && !-f $archive_file;
@@ -7511,7 +7511,7 @@ sub escape {
   s/([\t\n\\])/\\$1/g if defined $_;  # Escape tabs etc
   my $s = defined $_ ? $_ : '\N';     # NULL = \N
   # var & ~var will return 0 only for numbers
-   if ($s !~ /^[0-9,.E]+$/ && $optionally_enclosed_by eq '"') {
+   if ($s !~ /^[0-9,.E]+$/ && $optionally_enclosed_by eq '"' && $s ne '\N') {
      $s =~ s/([^\\])"/$1\\"/g;
      $s = $optionally_enclosed_by."$s".$optionally_enclosed_by;
   }
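Taken together, the two pt-archiver hunks above change --output-format=csv to emit a bare comma separator and to leave the NULL placeholder \N unquoted, so LOAD DATA INFILE can recognize it as NULL again. A condensed sketch of the escaping rule after the fix (not the tool's exact code; tab/newline escaping omitted):

    use strict;
    use warnings;

    my $optionally_enclosed_by = '"';

    sub escape_csv {
        my ($val) = @_;
        my $s = defined $val ? $val : '\N';   # NULL = \N
        # Quote only non-numeric values, and never the NULL placeholder.
        if ( $s !~ /^[0-9,.E]+$/ && $optionally_enclosed_by eq '"' && $s ne '\N' ) {
            $s =~ s/([^\\])"/$1\\"/g;
            $s = $optionally_enclosed_by . $s . $optionally_enclosed_by;
        }
        return $s;
    }

    print join(',', map { escape_csv($_) } 2, undef, 'testing...'), "\n";
    # prints: 2,\N,"testing..."

This is exactly the shape the new regression test t/pt-archiver/pt-2410.t (included near the end of this diff) asserts on.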
@@ -3063,11 +3063,24 @@ sub _parse_config {
   }
   elsif ( my $dbh = $args{dbh} ) {
      $config_data{format} = $args{format} || 'show_variables';
+      my $mysql_version = _get_version($dbh);
      my $sql = "SHOW /*!40103 GLOBAL*/ VARIABLES";
      PTDEBUG && _d($dbh, $sql);
      my $rows = $dbh->selectall_arrayref($sql);
-      $config_data{vars} = { map { @$_ } @$rows };
-      $config_data{mysql_version} = _get_version($dbh);
+      $config_data{vars} = {
+         map {
+            my ($variable, $value) = @$_;
+            if ( length($value) == 1024 && $mysql_version ge '5.7.0' ) {
+               my $var_sql = "SELECT \@\@global.$variable";
+               PTDEBUG && _d($dbh, $var_sql);
+               my $var_sth = $dbh->prepare($var_sql);
+               $var_sth->execute();
+               ($value) = $var_sth->fetchrow_array();
+            }
+            $variable => $value
+         } @$rows
+      };
+      $config_data{mysql_version} = $mysql_version;
   }
   else {
      die "Unknown config source";
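The new map body above works around SHOW VARIABLES truncation: since MySQL 5.7 the values come from a performance_schema table whose value column is VARCHAR(1024), so longer values are cut off, while SELECT @@GLOBAL.<name> still returns the full value (a later copy of this hunk in this diff carries that explanation as in-code comments). A standalone sketch of the fallback, assuming an already-connected DBI handle:

    use strict;
    use warnings;
    use DBI;

    # Placeholder connection parameters for illustration only.
    my $dbh = DBI->connect('DBI:mysql:host=127.0.0.1', 'user', 'pass',
        { RaiseError => 1 });

    my %vars;
    for my $row (@{ $dbh->selectall_arrayref("SHOW GLOBAL VARIABLES") }) {
        my ($variable, $value) = @$row;
        # A value of exactly 1024 characters may have been truncated;
        # re-read it through the variable itself to get the full value.
        if ( defined $value && length($value) == 1024 ) {
            ($value) = $dbh->selectrow_array("SELECT \@\@GLOBAL.$variable");
        }
        $vars{$variable} = $value;
    }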
@@ -4934,7 +4947,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -3984,7 +3984,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -4431,7 +4431,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -4548,7 +4548,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -3310,7 +3310,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -3491,7 +3491,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -5352,7 +5352,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -5947,7 +5947,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -6575,7 +6575,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -7858,7 +7858,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -8943,12 +8943,20 @@ sub main {
   $sth->finish();
   PTDEBUG && _d('Last chunk:', Dumper($last_chunk));

-   if ( !$last_chunk || !$last_chunk->{new_table_name} ) {
+   if ( !$last_chunk ) {
      $oktorun = undef;
      _die("Option --resume refers non-existing job ID: ${old_job_id}. Exiting."
         , UNSUPPORTED_OPERATION);
   }

+   if ( !$last_chunk->{new_table_name}
+        || !$last_chunk->{lower_boundary}
+        || !$last_chunk->{upper_boundary} ) {
+      $oktorun = undef;
+      _die("Option --resume refers job ${old_job_id} with empty boundaries. Exiting."
+         , UNSUPPORTED_OPERATION);
+   }
+
   if ( $last_chunk->{db} ne $db
        || $last_chunk->{tbl} ne $tbl
        || $last_chunk->{altr} ne $o->get('alter') ){
@@ -9607,11 +9615,16 @@ sub main {
   # ''
   # doesn't match '(?-xism:Failed to find a unique new table name)'

   # (*) Frank: commented them out because it caused infinite loop
   # and the mentioned test error doesn't arise

   my $original_error = $EVAL_ERROR;
-   my $original_error_code = $?;
+   my $original_error_code;
+   if ( $? ) {
+      $original_error_code = $?;
+   }
+   else {
+      $original_error_code = $!;
+   }

   $SIG{__DIE__} = 'DEFAULT';

   foreach my $task ( reverse @cleanup_tasks ) {
      eval {
@@ -9797,9 +9810,9 @@ sub main {

   if ( $o->get('history') ) {
      my $sth = $cxn->dbh()->prepare(
-         "UPDATE ${hist_table} SET new_table_name = ?"
+         "UPDATE ${hist_table} SET new_table_name = ? WHERE job_id = ?"
      );
-      $sth->execute($new_tbl->{tbl});
+      $sth->execute($new_tbl->{tbl}, $job_id);
   }

   # If the new table still exists, drop it unless the tool was interrupted.
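The WHERE clause added above is the PT-2422 race fix: without it the UPDATE rewrote new_table_name for every row of the history table, so concurrent pt-online-schema-change jobs could overwrite each other's state. A minimal sketch of the fixed statement, where $dbh, $hist_table, $new_table_name and $job_id stand in for the tool's own variables:

    # Sketch only; the handles and names are placeholders for the
    # variables visible in the hunk above.
    my $sth = $dbh->prepare(
        "UPDATE $hist_table SET new_table_name = ? WHERE job_id = ?"
    );
    $sth->execute($new_table_name, $job_id);   # scoped to this job only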
@@ -9912,7 +9925,7 @@ sub main {
      );
   }

-   if ( my $alter = $o->get('alter') ) {
+   if ( (my $alter = $o->get('alter')) && !$o->get('resume') ) {
      print "Altering new table...\n";
      my $sql = "ALTER TABLE $new_tbl->{name} $alter";
      print $sql, "\n" if $o->get('print');
@@ -9921,10 +9934,12 @@ sub main {
      $cxn->dbh()->do($sql);
   };
   if ( $EVAL_ERROR ) {
-      if ( $plugin && $plugin->can('before_die') ) {
-         $plugin->before_die(exit_status => $EVAL_ERROR);
-      }
+      if ( $plugin && $plugin->can('before_die') ) {
+         $plugin->before_die(exit_status => $EVAL_ERROR);
+      }
+      # this is trapped by a signal handler. Don't replace it with _die
+      # we need to override $SIG{__DIE__} to return correct error code
+      $SIG{__DIE__} = sub { print(STDERR "$_[0]"); exit ERROR_ALTERING_TABLE; };
      die "Error altering new table $new_tbl->{name}: $EVAL_ERROR\n";
   }
   print "Altered $new_tbl->{name} OK.\n";
@@ -12792,7 +12792,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -16258,12 +16258,12 @@ type: string; default: report
 How to format and print the query analysis results. Accepted values are:

    VALUE          FORMAT
-   =======        ===============================
+   =======        ======================================
    report         Standard query analysis report
    slowlog        MySQL slow log
    json           JSON, one array per query class
    json-anon      JSON without example queries
-   secure-slowlog JSON without example queries
+   secure-slowlog MySQL slow log with anonymized queries

 The entire C<report> output can be disabled by specifying C<--no-report>
 (see L<"--[no]report">), and its sections can be disabled or rearranged
@@ -4656,7 +4656,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -3790,7 +3790,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -899,7 +899,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -9082,11 +9082,24 @@ sub _parse_config {
   }
   elsif ( my $dbh = $args{dbh} ) {
      $config_data{format} = $args{format} || 'show_variables';
+      my $mysql_version = _get_version($dbh);
      my $sql = "SHOW /*!40103 GLOBAL*/ VARIABLES";
      PTDEBUG && _d($dbh, $sql);
      my $rows = $dbh->selectall_arrayref($sql);
-      $config_data{vars} = { map { @$_ } @$rows };
-      $config_data{mysql_version} = _get_version($dbh);
+      $config_data{vars} = {
+         map {
+            my ($variable, $value) = @$_;
+            if ( length($value) == 1024 && $mysql_version ge '5.7.0' ) {
+               my $var_sql = "SELECT \@\@global.$variable";
+               PTDEBUG && _d($dbh, $var_sql);
+               my $var_sth = $dbh->prepare($var_sql);
+               $var_sth->execute();
+               ($value) = $var_sth->fetchrow_array();
+            }
+            $variable => $value
+         } @$rows
+      };
+      $config_data{mysql_version} = $mysql_version;
   }
   else {
      die "Unknown config source";
@@ -10272,11 +10285,11 @@ sub main {


   if ( $o->get('truncate-replicate-table') && $o->get('resume') ) {
-      die "--resume and truncate-replicate-table are mutually exclusive";
+      die "--resume and truncate-replicate-table are mutually exclusive";
   }

   if ( $o->get('truncate-replicate-table') && !$o->get('empty-replicate-table') ) {
-      die "--resume and --no-empty-replicate-table are mutually exclusive";
+      die "--truncate-replicate-table and --no-empty-replicate-table are mutually exclusive";
   }

   # ########################################################################
@@ -9604,7 +9604,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -4297,7 +4297,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -4729,7 +4729,7 @@ sub version_check {
      PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
   }

-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        update_check_times(
           instances => $instances_to_check,
@@ -582,8 +582,8 @@ OS_NAME=
 ARCH=
 OS=
 INSTALL=0
-RPM_RELEASE=1
-DEB_RELEASE=1
+RPM_RELEASE=2
+DEB_RELEASE=2
 REVISION=0
 GIT_BRANCH=${GIT_BRANCH}
 GIT_REPO=https://github.com/percona/percona-toolkit.git
@@ -41,7 +41,7 @@ master_doc = 'index'

 # General information about the project.
 project = u'Percona Toolkit'
-copyright = u'2024, Percona LLC and/or its affiliates'
+copyright = u'2025, Percona LLC and/or its affiliates'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -50,7 +50,7 @@ copyright = u'2024, Percona LLC and/or its affiliates'
 # The short X.Y version.
 version = '3.7'
 # The full version, including alpha/beta/rc tags.
-release = '3.7.0'
+release = '3.7.0-2'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -202,7 +202,7 @@ htmlhelp_basename = 'PerconaToolkitdoc'
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
     ('index', 'PerconaToolkit.tex', u'Percona Toolkit Documentation',
-     u'2024, Percona LLC and/or its affiliates', 'manual'),
+     u'2025, Percona LLC and/or its affiliates', 'manual'),
 ]

 # The name of an image file (relative to this directory) to place at the top of
@@ -236,5 +236,5 @@ latex_toplevel_sectioning = 'part'
 # (source start file, name, description, authors, manual section).
 man_pages = [
     ('index', 'perconatoolkit', u'Percona Toolkit Documentation',
-     [u'2024, Percona LLC and/or its affiliates'], 1)
+     [u'2025, Percona LLC and/or its affiliates'], 1)
 ]
@@ -1,6 +1,26 @@
 Release Notes
 ***************

+v3.7.0-2 released 2025-05-14
+==============================
+
+This release addresses multiple security vulnerabilities reported in Percona Toolkit version 3.7.0, including issues related to the `libxml2` component (CVE-2024-56171, CVE-2025-24928), `openssl` (CVE-2024-12797), and `krb5` (CVE-2022-37967).
+
+Bug Fixed
+------------
+
+* :jirabug:`PT-2442`: percona-toolkit:latest Vulnerability [CVE-2024-56171 CVE-2024-12797 CVE-2022-37967 CVE-2025-24928]
+* :jirabug:`PT-2375`: pt-table-sync does not work with generated columns (Thanks to Henning Pöttker for the contribution)
+* :jirabug:`PT-2400`: pt-table-checksum reports the error for warning code 4164
+* :jirabug:`PT-2377`: pt-table-sync does not handle utf8 strings in JSON columns correctly (Thanks to Henning Pöttker for the contribution)
+* :jirabug:`PT-2378`: pt-table-sync does print floating point numbers in SQL statements with insufficient precision (Thanks to Henning Pöttker for the contribution)
+* :jirabug:`PT-2389`: pt-online-schema-change resume functionality doesn't work with ADD/DROP column (Thanks to Perry Harrington for the contribution)
+* :jirabug:`PT-2410`: pt-archiver brokes when using output-format=csv and has null values (Thanks to Roberto de Bem for the contribution)
+* :jirabug:`PT-2422`: pt-online-schema-change race overwrites new_table_name (Thanks to Perry Harrington for the contribution)
+* :jirabug:`PT-2407`: pt-online-schema-change exit status(return code) is 0 even if it does NOT succeed
+* :jirabug:`PT-2355`: pt-online-schema-change should not resume a job with empty boundaries
+* :jirabug:`PT-1577`: pt-secure-collect, replace pt-secure-data in Usage section
+
 v3.7.0 released 2024-12-23
 ==============================

go.mod (30 changes)
@@ -1,13 +1,15 @@
 module github.com/percona/percona-toolkit

-go 1.23.4
+go 1.24.0
+
+toolchain go1.24.1

 require (
    github.com/AlekSi/pointer v1.2.0
    github.com/Ladicle/tabwriter v1.0.0
    github.com/Masterminds/semver v1.5.0
    github.com/alecthomas/kingpin v2.2.6+incompatible
-   github.com/alecthomas/kong v1.8.1
+   github.com/alecthomas/kong v1.11.0
    github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
    github.com/go-ini/ini v1.67.0
    github.com/golang/mock v1.6.0
@@ -21,17 +23,17 @@ require (
    github.com/pborman/getopt v1.1.0
    github.com/percona/go-mysql v0.0.0-20210427141028-73d29c6da78c
    github.com/pkg/errors v0.9.1
-   github.com/rs/zerolog v1.33.0
+   github.com/rs/zerolog v1.34.0
    github.com/shirou/gopsutil v3.21.11+incompatible
    github.com/sirupsen/logrus v1.9.3
    github.com/stretchr/testify v1.10.0
    github.com/xlab/treeprint v1.2.0
-   go.mongodb.org/mongo-driver v1.17.2
-   golang.org/x/crypto v0.35.0
+   go.mongodb.org/mongo-driver v1.17.4
+   golang.org/x/crypto v0.39.0
    golang.org/x/exp v0.0.0-20230321023759-10a507213a29
    gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22
    gopkg.in/yaml.v2 v2.4.0
-   k8s.io/api v0.32.2
+   k8s.io/api v0.33.1
    k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738
 )
@@ -43,7 +45,6 @@ require (
    github.com/go-ole/go-ole v1.2.6 // indirect
    github.com/gogo/protobuf v1.3.2 // indirect
    github.com/golang/snappy v0.0.4 // indirect
-   github.com/google/gofuzz v1.2.0 // indirect
    github.com/json-iterator/go v1.1.12 // indirect
    github.com/klauspost/compress v1.16.7 // indirect
    github.com/kr/text v0.2.0 // indirect
@@ -60,16 +61,17 @@ require (
    github.com/xdg-go/stringprep v1.0.4 // indirect
    github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
    github.com/yusufpapurcu/wmi v1.2.2 // indirect
-   golang.org/x/net v0.33.0 // indirect
-   golang.org/x/sync v0.11.0 // indirect
-   golang.org/x/sys v0.30.0 // indirect
-   golang.org/x/term v0.29.0 // indirect
-   golang.org/x/text v0.22.0 // indirect
+   golang.org/x/net v0.38.0 // indirect
+   golang.org/x/sync v0.15.0 // indirect
+   golang.org/x/sys v0.33.0 // indirect
+   golang.org/x/term v0.32.0 // indirect
+   golang.org/x/text v0.26.0 // indirect
    gopkg.in/inf.v0 v0.9.1 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
-   k8s.io/apimachinery v0.32.2 // indirect
+   k8s.io/apimachinery v0.33.1 // indirect
    k8s.io/klog/v2 v2.130.1 // indirect
    sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 // indirect
-   sigs.k8s.io/structured-merge-diff/v4 v4.4.2 // indirect
+   sigs.k8s.io/randfill v1.0.0 // indirect
+   sigs.k8s.io/structured-merge-diff/v4 v4.6.0 // indirect
    sigs.k8s.io/yaml v1.4.0 // indirect
 )
go.sum (59 changes)
@@ -8,8 +8,8 @@ github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8v
 github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
 github.com/alecthomas/kingpin v2.2.6+incompatible h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=
 github.com/alecthomas/kingpin v2.2.6+incompatible/go.mod h1:59OFYbFVLKQKq+mqrL6Rw5bR0c3ACQaawgXx0QYndlE=
-github.com/alecthomas/kong v1.8.1 h1:6aamvWBE/REnR/BCq10EcozmcpUPc5aGI1lPAWdB0EE=
-github.com/alecthomas/kong v1.8.1/go.mod h1:p2vqieVMeTAnaC83txKtXe8FLke2X07aruPWXyMPQrU=
+github.com/alecthomas/kong v1.11.0 h1:y++1gI7jf8O7G7l4LZo5ASFhrhJvzc+WgF/arranEmM=
+github.com/alecthomas/kong v1.11.0/go.mod h1:p2vqieVMeTAnaC83txKtXe8FLke2X07aruPWXyMPQrU=
 github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
 github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=
@@ -41,8 +41,6 @@ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN
 github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
 github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
-github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
@@ -86,11 +84,11 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
-github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
-github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
-github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
-github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
+github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
+github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
+github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
+github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
+github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
 github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
 github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
 github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
@@ -125,14 +123,14 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg=
 github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
-go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM=
-go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
+go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw=
+go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
-golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
+golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
+golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
 golang.org/x/exp v0.0.0-20230321023759-10a507213a29 h1:ooxPy7fPvB4kwsA2h+iBNHkAbp/4JxTSwCmvdjEYmug=
 golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
 golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
@@ -146,15 +144,15 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
-golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
+golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
-golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
+golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -170,18 +168,18 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
-golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
+golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
-golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
+golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
+golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
-golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
-golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
+golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
+golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
@@ -205,17 +203,20 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-k8s.io/api v0.32.2 h1:bZrMLEkgizC24G9eViHGOPbW+aRo9duEISRIJKfdJuw=
-k8s.io/api v0.32.2/go.mod h1:hKlhk4x1sJyYnHENsrdCWw31FEmCijNGPJO5WzHiJ6Y=
-k8s.io/apimachinery v0.32.2 h1:yoQBR9ZGkA6Rgmhbp/yuT9/g+4lxtsGYwW6dR6BDPLQ=
-k8s.io/apimachinery v0.32.2/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE=
+k8s.io/api v0.33.1 h1:tA6Cf3bHnLIrUK4IqEgb2v++/GYUtqiu9sRVk3iBXyw=
+k8s.io/api v0.33.1/go.mod h1:87esjTn9DRSRTD4fWMXamiXxJhpOIREjWOSjsW1kEHw=
+k8s.io/apimachinery v0.33.1 h1:mzqXWV8tW9Rw4VeW9rEkqvnxj59k1ezDUl20tFK/oM4=
+k8s.io/apimachinery v0.33.1/go.mod h1:BHW0YOu7n22fFv/JkYOEfkUYNRN0fj0BlvMFWA7b+SM=
 k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk=
 k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE=
 k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738 h1:M3sRQVHv7vB20Xc2ybTt7ODCeFj6JSWYFzOFnYeS6Ro=
 k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
 sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 h1:/Rv+M11QRah1itp8VhT6HoVx1Ray9eB4DBr+K+/sCJ8=
 sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3/go.mod h1:18nIHnGi6636UCz6m8i4DhaJ65T6EruyzmoQqI2BVDo=
-sigs.k8s.io/structured-merge-diff/v4 v4.4.2 h1:MdmvkGuXi/8io6ixD5wud3vOLwc1rj0aNqRlpuvjmwA=
-sigs.k8s.io/structured-merge-diff/v4 v4.4.2/go.mod h1:N8f93tFZh9U6vpxwRArLiikrE5/2tiu1w1AGfACIGE4=
+sigs.k8s.io/randfill v0.0.0-20250304075658-069ef1bbf016/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY=
+sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU=
+sigs.k8s.io/randfill v1.0.0/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY=
+sigs.k8s.io/structured-merge-diff/v4 v4.6.0 h1:IUA9nvMmnKWcj5jl84xn+T5MnlZKThmUW1TdblaLVAc=
+sigs.k8s.io/structured-merge-diff/v4 v4.6.0/go.mod h1:dDy58f92j70zLsuZVuUX5Wp9vtxXpaZnkPGWeqDfCps=
 sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
 sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=
@@ -111,11 +111,30 @@ sub _parse_config {
   }
   elsif ( my $dbh = $args{dbh} ) {
      $config_data{format} = $args{format} || 'show_variables';
+      my $mysql_version = _get_version($dbh);
      my $sql = "SHOW /*!40103 GLOBAL*/ VARIABLES";
      PTDEBUG && _d($dbh, $sql);
      my $rows = $dbh->selectall_arrayref($sql);
-      $config_data{vars} = { map { @$_ } @$rows };
-      $config_data{mysql_version} = _get_version($dbh);
+      $config_data{vars} = {
+         map {
+            my ($variable, $value) = @$_;
+            # Starting from MySQL 5.7.6, SHOW VARIABLES retrieves records from
+            # the performance_schema table named GLOBAL_VARIABLES. This table
+            # stores variable values in a VARCHAR(1024) column, meaning longer
+            # values may be truncated. However, the full value can still be
+            # retrieved by accessing the variable with SELECT @@GLOBAL.
+            # https://dev.mysql.com/doc/refman/5.7/en/information-schema-variables-table.html
+            if ( length($value) == 1024 && $mysql_version ge '5.7.0' ) {
+               my $var_sql = "SELECT \@\@global.$variable";
+               PTDEBUG && _d($dbh, $var_sql);
+               my $var_sth = $dbh->prepare($var_sql);
+               $var_sth->execute();
+               ($value) = $var_sth->fetchrow_array();
+            }
+            $variable => $value
+         } @$rows
+      };
+      $config_data{mysql_version} = $mysql_version;
   }
   else {
      die "Unknown config source";
@@ -181,7 +181,7 @@ sub version_check {
   }

   # Always update the vc file, even if the version check fails.
-   if ( @$instances_to_check ) {
+   if ( $instances_to_check and @$instances_to_check ) {
     eval {
        # Update the check time for things we checked. I.e. if we
        # didn't check it, do _not_ update its time.
@@ -16,6 +16,6 @@ sphinx-tabs
 certifi>=2024.7.4 # not directly required, pinned by Snyk to avoid a vulnerability
 jinja2>=3.1.6 # not directly required, pinned by Snyk to avoid a vulnerability
 pygments>=2.15.0 # not directly required, pinned by Snyk to avoid a vulnerability
-requests>=2.31.0 # not directly required, pinned by Snyk to avoid a vulnerability
-setuptools>=70.0.0 # not directly required, pinned by Snyk to avoid a vulnerability
+requests>=2.32.4 # not directly required, pinned by Snyk to avoid a vulnerability
+setuptools>=78.1.1 # not directly required, pinned by Snyk to avoid a vulnerability
 idna>=3.7 # not directly required, pinned by Snyk to avoid a vulnerability
@@ -8,103 +8,104 @@ import (
 )

 // docsExamined is renamed from nscannedObjects in 3.2.0
+// json tags are used for PMM purposes
 // https://docs.mongodb.com/manual/reference/database-profiler/#system.profile.docsExamined
 type SystemProfile struct {
-    AllUsers []interface{} `bson:"allUsers"`
-    Client string `bson:"client"`
-    CursorExhausted bool `bson:"cursorExhausted"`
+    AllUsers []interface{} `bson:"allUsers" json:"allUsers"`
+    Client string `bson:"client" json:"client"`
+    CursorExhausted bool `bson:"cursorExhausted" json:"cursorExhausted"`
     ExecStats struct {
-        Advanced int `bson:"advanced"`
-        ExecutionTimeMillisEstimate int `bson:"executionTimeMillisEstimate"`
+        Advanced int `bson:"advanced" json:"advanced"`
+        ExecutionTimeMillisEstimate int `bson:"executionTimeMillisEstimate" json:"executionTimeMillisEstimate"`
         InputStage struct {
-            Advanced int `bson:"advanced"`
-            Direction string `bson:"direction"`
-            DocsExamined int `bson:"docsExamined"`
-            ExecutionTimeMillisEstimate int `bson:"executionTimeMillisEstimate"`
+            Advanced int `bson:"advanced" json:"advanced"`
+            Direction string `bson:"direction" json:"direction"`
+            DocsExamined int `bson:"docsExamined" json:"docsExamined"`
+            ExecutionTimeMillisEstimate int `bson:"executionTimeMillisEstimate" json:"executionTimeMillisEstimate"`
             Filter struct {
                 Date struct {
-                    Eq string `bson:"$eq"`
-                } `bson:"date"`
-            } `bson:"filter"`
-            Invalidates int `bson:"invalidates"`
-            IsEOF int `bson:"isEOF"`
-            NReturned int `bson:"nReturned"`
-            NeedTime int `bson:"needTime"`
-            NeedYield int `bson:"needYield"`
-            RestoreState int `bson:"restoreState"`
-            SaveState int `bson:"saveState"`
-            Stage string `bson:"stage"`
-            Works int `bson:"works"`
-        } `bson:"inputStage"`
-        Invalidates int `bson:"invalidates"`
-        IsEOF int `bson:"isEOF"`
-        LimitAmount int `bson:"limitAmount"`
-        NReturned int `bson:"nReturned"`
-        NeedTime int `bson:"needTime"`
-        NeedYield int `bson:"needYield"`
-        RestoreState int `bson:"restoreState"`
-        SaveState int `bson:"saveState"`
-        Stage string `bson:"stage"`
-        Works int `bson:"works"`
-        DocsExamined int `bson:"docsExamined"`
-    } `bson:"execStats"`
-    KeyUpdates int `bson:"keyUpdates"`
-    KeysExamined int `bson:"keysExamined"`
+                    Eq string `bson:"$eq" json:"$eq"`
+                } `bson:"date" json:"date"`
+            } `bson:"filter" json:"filter"`
+            Invalidates int `bson:"invalidates" json:"invalidates"`
+            IsEOF int `bson:"isEOF" json:"isEOF"`
+            NReturned int `bson:"nReturned" json:"nReturned"`
+            NeedTime int `bson:"needTime" json:"needTime"`
+            NeedYield int `bson:"needYield" json:"needYield"`
+            RestoreState int `bson:"restoreState" json:"restoreState"`
+            SaveState int `bson:"saveState" json:"saveState"`
+            Stage string `bson:"stage" json:"stage"`
+            Works int `bson:"works" json:"works"`
+        } `bson:"inputStage" json:"inputStage"`
+        Invalidates int `bson:"invalidates" json:"invalidates"`
+        IsEOF int `bson:"isEOF" json:"isEOF"`
+        LimitAmount int `bson:"limitAmount" json:"limitAmount"`
+        NReturned int `bson:"nReturned" json:"nReturned"`
+        NeedTime int `bson:"needTime" json:"needTime"`
+        NeedYield int `bson:"needYield" json:"needYield"`
+        RestoreState int `bson:"restoreState" json:"restoreState"`
+        SaveState int `bson:"saveState" json:"saveState"`
+        Stage string `bson:"stage" json:"stage"`
+        Works int `bson:"works" json:"works"`
+        DocsExamined int `bson:"docsExamined" json:"docsExamined"`
+    } `bson:"execStats" json:"execStats"`
+    KeyUpdates int `bson:"keyUpdates" json:"keyUpdates"`
+    KeysExamined int `bson:"keysExamined" json:"keysExamined"`
     Locks struct {
         Collection struct {
             AcquireCount struct {
-                Read int `bson:"R"`
-                ReadShared int `bson:"r"`
-            } `bson:"acquireCount"`
-        } `bson:"Collection"`
+                Read int `bson:"R" json:"R"`
+                ReadShared int `bson:"r" json:"r"`
+            } `bson:"acquireCount" json:"acquireCount"`
+        } `bson:"Collection" json:"Collection"`
         Database struct {
             AcquireCount struct {
-                ReadShared int `bson:"r"`
-            } `bson:"acquireCount"`
+                ReadShared int `bson:"r" json:"r"`
+            } `bson:"acquireCount" json:"acquireCount"`
             AcquireWaitCount struct {
-                ReadShared int `bson:"r"`
-            } `bson:"acquireWaitCount"`
+                ReadShared int `bson:"r" json:"r"`
+            } `bson:"acquireWaitCount" json:"acquireWaitCount"`
             TimeAcquiringMicros struct {
-                ReadShared int64 `bson:"r"`
-            } `bson:"timeAcquiringMicros"`
-        } `bson:"Database"`
+                ReadShared int64 `bson:"r" json:"r"`
+            } `bson:"timeAcquiringMicros" json:"timeAcquiringMicros"`
+        } `bson:"Database" json:"Database"`
         Global struct {
             AcquireCount struct {
-                ReadShared int `bson:"r"`
-                WriteShared int `bson:"w"`
-            } `bson:"acquireCount"`
-        } `bson:"Global"`
+                ReadShared int `bson:"r" json:"r"`
+                WriteShared int `bson:"w" json:"w"`
+            } `bson:"acquireCount" json:"acquireCount"`
+        } `bson:"Global" json:"Global"`
         MMAPV1Journal struct {
             AcquireCount struct {
-                ReadShared int `bson:"r"`
-            } `bson:"acquireCount"`
-        } `bson:"MMAPV1Journal"`
-    } `bson:"locks"`
-    Millis int `bson:"millis"`
-    Nreturned int `bson:"nreturned"`
-    Ns string `bson:"ns"`
-    NumYield int `bson:"numYield"`
-    Op string `bson:"op"`
-    PlanSummary string `bson:"planSummary"`
-    Protocol string `bson:"protocol"`
-    Query bson.D `bson:"query"`
-    UpdateObj bson.D `bson:"updateobj"`
-    Command bson.D `bson:"command"`
-    OriginatingCommand bson.D `bson:"originatingCommand"`
-    ResponseLength int `bson:"responseLength"`
-    Ts time.Time `bson:"ts"`
-    User string `bson:"user"`
-    WriteConflicts int `bson:"writeConflicts"`
-    DocsExamined int `bson:"docsExamined"`
-    QueryHash string `bson:"queryHash"`
+                ReadShared int `bson:"r" json:"r"`
+            } `bson:"acquireCount" json:"acquireCount"`
+        } `bson:"MMAPV1Journal" json:"MMAPV1Journal"`
+    } `bson:"locks" json:"locks"`
+    Millis int `bson:"millis" json:"durationMillis"`
+    Nreturned int `bson:"nreturned" json:"nreturned"`
+    Ns string `bson:"ns" json:"ns"`
+    NumYield int `bson:"numYield" json:"numYield"`
+    Op string `bson:"op" json:"op"`
+    PlanSummary string `bson:"planSummary" json:"planSummary"`
+    Protocol string `bson:"protocol" json:"protocol"`
+    Query bson.D `bson:"query" json:"query"`
+    UpdateObj bson.D `bson:"updateobj" json:"updateobj"`
+    Command bson.D `bson:"command" json:"command"`
+    OriginatingCommand bson.D `bson:"originatingCommand" json:"originatingCommand"`
+    ResponseLength int `bson:"responseLength" json:"reslen"`
+    Ts time.Time `bson:"ts" json:"ts"`
+    User string `bson:"user" json:"user"`
+    WriteConflicts int `bson:"writeConflicts" json:"writeConflicts"`
+    DocsExamined int `bson:"docsExamined" json:"docsExamined"`
+    QueryHash string `bson:"queryHash" json:"queryHash"`
     Storage struct {
         Data struct {
-            BytesRead int64 `bson:"bytesRead"`
-            TimeReadingMicros int64 `bson:"timeReadingMicros"`
-        } `bson:"data"`
-    } `bson:"storage"`
-    AppName string `bson:"appName"`
-    Comments string `bson:"comments"`
+            BytesRead int64 `bson:"bytesRead" json:"bytesRead"`
+            TimeReadingMicros int64 `bson:"timeReadingMicros" json:"timeReadingMicros"`
+        } `bson:"data" json:"data"`
+    } `bson:"storage" json:"storage"`
+    AppName string `bson:"appName" json:"appName"`
+    Comments string `bson:"comments" json:"comments"`
 }

 func NewExampleQuery(doc SystemProfile) ExampleQuery {
@@ -16,7 +16,6 @@ import (

 const (
     planSummaryCollScan = "COLLSCAN"
-    planSummaryIXScan   = "IXSCAN"
 )

 type StatsError struct {

@@ -106,9 +105,8 @@ func (s *Stats) Add(doc proto.SystemProfile) error {
     if qiac.PlanSummary == planSummaryCollScan {
         qiac.CollScanCount++
     }
-    if strings.HasPrefix(qiac.PlanSummary, planSummaryIXScan) {
-        qiac.PlanSummary = planSummaryIXScan
-    }
+
+    qiac.PlanSummary = strings.Split(qiac.PlanSummary, " ")[0]

     qiac.NReturned = append(qiac.NReturned, float64(doc.Nreturned))
     qiac.QueryTime = append(qiac.QueryTime, float64(doc.Millis))
@@ -28,31 +28,33 @@ type sslSecret struct {

 // Dumper struct is for dumping cluster
 type Dumper struct {
-    cmd           string
-    kubeconfig    string
-    resources     []string
-    filePaths     []string
-    fileContainer string
-    namespace     string
-    location      string
-    errors        string
-    mode          int64
-    crType        string
-    forwardport   string
-    sslSecrets    []sslSecret
+    cmd            string
+    kubeconfig     string
+    resources      []string
+    filePaths      []string
+    fileContainer  string
+    namespace      string
+    location       string
+    errors         string
+    mode           int64
+    crType         string
+    forwardport    string
+    sslSecrets     []sslSecret
+    skipPodSummary bool
 }

 var resourcesRe = regexp.MustCompile(`(\w+\.(\w+).percona\.com)`)

 // New return new Dumper object
-func New(location, namespace, resource string, kubeconfig string, forwardport string) Dumper {
+func New(location, namespace, resource string, kubeconfig string, forwardport string, skipPodSummary bool) Dumper {
     d := Dumper{
-        cmd:         "kubectl",
-        kubeconfig:  kubeconfig,
-        location:    "cluster-dump",
-        mode:        int64(0o777),
-        namespace:   namespace,
-        forwardport: forwardport,
+        cmd:            "kubectl",
+        kubeconfig:     kubeconfig,
+        location:       "cluster-dump",
+        mode:           int64(0o777),
+        namespace:      namespace,
+        forwardport:    forwardport,
+        skipPodSummary: skipPodSummary,
     }
     resources := []string{
         "pods",
@@ -352,18 +354,20 @@ func (d *Dumper) DumpCluster() error {
             crName = pod.Labels["app.kubernetes.io/instance"]
         }
         // Get summary
-        output, err = d.getPodSummary(resourceType(d.crType), pod.Name, crName, ns.Name)
-        if err != nil {
-            d.logError(err.Error(), d.crType, pod.Name)
-            err = addToArchive(location, d.mode, []byte(err.Error()), tw)
-            if err != nil {
-                log.Printf("Error: create summary errors archive for pod %s in namespace %s: %v", pod.Name, ns.Name, err)
-            }
-        } else {
-            err = addToArchive(location, d.mode, output, tw)
-            if err != nil {
-                d.logError(err.Error(), "create summary archive for pod "+pod.Name)
-                log.Printf("Error: create summary archive for pod %s: %v", pod.Name, err)
-            }
-        }
+        if !d.skipPodSummary {
+            output, err = d.getPodSummary(resourceType(d.crType), pod.Name, crName, ns.Name)
+            if err != nil {
+                d.logError(err.Error(), d.crType, pod.Name)
+                err = addToArchive(location, d.mode, []byte(err.Error()), tw)
+                if err != nil {
+                    log.Printf("Error: create summary errors archive for pod %s in namespace %s: %v", pod.Name, ns.Name, err)
+                }
+            } else {
+                err = addToArchive(location, d.mode, output, tw)
+                if err != nil {
+                    d.logError(err.Error(), "create summary archive for pod "+pod.Name)
+                    log.Printf("Error: create summary archive for pod %s: %v", pod.Name, err)
+                }
+            }
+        }
@@ -28,6 +28,7 @@ func main() {
     kubeconfig := ""
     forwardport := ""
     version := false
+    skipPodSummary := false

     flag.StringVar(&namespace, "namespace", "", "Namespace for collecting data. If empty data will be collected from all namespaces")
     flag.StringVar(&resource, "resource", "auto", "Collect data, specific to the resource. Supported values: pxc, psmdb, pg, pgv2, ps, none, auto")
@@ -35,6 +36,7 @@ func main() {
     flag.StringVar(&kubeconfig, "kubeconfig", "", "Path to kubeconfig")
     flag.StringVar(&forwardport, "forwardport", "", "Port to use for port forwarding")
     flag.BoolVar(&version, "version", false, "Print version")
+    flag.BoolVar(&skipPodSummary, "skip-pod-summary", false, "Skip pod summary collection")
     flag.Parse()

     if version {
@@ -50,7 +52,7 @@ func main() {
         resource += "/" + clusterName
     }

-    d := dumper.New("", namespace, resource, kubeconfig, forwardport)
+    d := dumper.New("", namespace, resource, kubeconfig, forwardport, skipPodSummary)
    log.Println("Start collecting cluster data")

     err := d.DumpCluster()
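With the plumbing above, pt-k8s-debug-collector can skip per-pod summary collection entirely. An invocation mirroring the regression test that follows, with placeholder values for the environment-specific arguments:

    pt-k8s-debug-collector --kubeconfig "$KUBECONFIG" --forwardport "$FORWARDPORT" --resource auto --skip-pod-summary

The test below then asserts that the resulting cluster-dump.tar.gz contains no summary.txt files.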
@@ -365,6 +365,106 @@ func TestSSLResourceOption(t *testing.T) {
     }
 }

+/*
+Tests for option --skip-pod-summary
+*/
+func TestPT_2453(t *testing.T) {
+    testcmd := []string{"sh", "-c", "tar -tf cluster-dump.tar.gz --wildcards '*/summary.txt' 2>/dev/null | wc -l"}
+    tests := []struct {
+        name       string
+        resource   string
+        want       string
+        kubeconfig string
+    }{
+        {
+            name:       "none",
+            resource:   "none",
+            want:       "0",
+            kubeconfig: "",
+        },
+        {
+            name:       "pxc",
+            resource:   "pxc",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PXC"),
+        },
+        {
+            name:       "ps",
+            resource:   "ps",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PS"),
+        },
+        {
+            name:       "psmdb",
+            resource:   "psmdb",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PSMDB"),
+        },
+        {
+            name:       "pg",
+            resource:   "pg",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PG"),
+        },
+        {
+            name:       "pgv2",
+            resource:   "pgv2",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PG2"),
+        },
+        {
+            name:       "auto pxc",
+            resource:   "auto",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PXC"),
+        },
+        {
+            name:       "auto ps",
+            resource:   "auto",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PS"),
+        },
+        {
+            name:       "auto psmdb",
+            resource:   "auto",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PSMDB"),
+        },
+        {
+            name:       "auto pg",
+            resource:   "auto",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PG"),
+        },
+        {
+            name:       "auto pgv2",
+            resource:   "auto",
+            want:       "0",
+            kubeconfig: os.Getenv("KUBECONFIG_PG2"),
+        },
+    }
+
+    for _, test := range tests {
+        cmd := exec.Command("../../../bin/pt-k8s-debug-collector", "--kubeconfig", test.kubeconfig, "--forwardport", os.Getenv("FORWARDPORT"), "--resource", test.resource, "--skip-pod-summary")
+        if err := cmd.Run(); err != nil {
+            t.Errorf("error executing pt-k8s-debug-collector: %s\nCommand: %s", err.Error(), cmd.String())
+        }
+        defer func() {
+            cmd = exec.Command("rm", "-f", "cluster-dump.tar.gz")
+            if err := cmd.Run(); err != nil {
+                t.Errorf("error cleaning up test data: %s", err.Error())
+            }
+        }()
+        out, err := exec.Command(testcmd[0], testcmd[1:]...).Output()
+        if err != nil {
+            t.Errorf("test %s, error running command %s:\n%s\n\nCommand output:\n%s", test.name, testcmd, err.Error(), out)
+        }
+        if strings.TrimRight(bytes.NewBuffer(out).String(), "\n") != test.want {
+            t.Errorf("test %s, output is not as expected\nOutput: %s\nWanted: %s", test.name, out, test.want)
+        }
+    }
+}
+
 /*
 Option --version
 */
@@ -15,7 +15,7 @@ Usage

 ::

-   pt-secure-data [<flags>] <command> [<args> ...]
+   pt-secure-collect [<flags>] <command> [<args> ...]

 By default, :program:`pt-secure-collect` will collect the output of:

@@ -123,11 +123,11 @@ $output = output(
);
$output = `cat archive.test.table_2`;
is($output, <<EOF
-1, 2, 3, 4
-2, "\\N", 3, 4
-3, 2, 3, "\\\t"
-4, 2, 3, "\\\n"
-5, 2, 3, "Zapp \\"Brannigan"
+1,2,3,4
+2,\\N,3,4
+3,2,3,"\\\t"
+4,2,3,"\\\n"
+5,2,3,"Zapp \\"Brannigan"
EOF
, '--output-format=csv');
`rm -f archive.test.table_2`;

75  t/pt-archiver/pt-2410.t  Normal file
@@ -0,0 +1,75 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use charnames ':full';

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-archiver";

my $dp  = new DSNParser(opts=>$dsn_opts);
my $sb  = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('source');

if ( !$dbh ) {
   plan skip_all => 'Cannot connect to sandbox source';
}

my $output;
my $exit_status;
my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";

$sb->wipe_clean($dbh);
$sb->create_dbs($dbh, ['test']);

$sb->load_file('source', 't/pt-archiver/samples/pt-2410.sql');

($output, $exit_status) = full_output(
   sub { pt_archiver::main(
      qw(--where 1=1 --output-format=csv),
      '--source', "L=1,D=pt_2410,t=test,F=$cnf",
      '--file', '/tmp/pt-2410.csv') },
);

is(
   $exit_status,
   0,
   'pt-archiver completed'
);

$output = `cat /tmp/pt-2410.csv`;
like(
   $output,
   qr/1,\\N,"testing..."/,
   'NULL values stored correctly'
) or diag($output);

$dbh->do("load data local infile '/tmp/pt-2410.csv' into table pt_2410.test COLUMNS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"'");

$output = `/tmp/12345/use pt_2410 -N -e 'SELECT * FROM test'`;

like(
   $output,
   qr/1 NULL testing.../,
   'NULL values loaded correctly'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################
diag(`rm -f /tmp/pt-2410.csv`);
$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
exit;
10  t/pt-archiver/samples/pt-2410.sql  Normal file
@@ -0,0 +1,10 @@
CREATE DATABASE pt_2410;
USE pt_2410;

CREATE TABLE test(
  id int not null primary key auto_increment,
  column1 int default null,
  column2 varchar(50) not null);

INSERT INTO test VALUES (null,null,'testing...');
INSERT INTO test VALUES (null,null,'testing...');
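The pt-2410 test and sample above hinge on one CSV convention: pt-archiver's --output-format=csv writes a NULL column as an unquoted \N, which LOAD DATA INFILE turns back into SQL NULL, while a quoted "\N" would round-trip as a literal two-character string. A minimal sketch of that round trip, outside the test suite and with a hypothetical file name:

#!/usr/bin/env perl
# Sketch only, not part of the suite; /tmp/null-demo.csv is hypothetical.
use strict;
use warnings;

open my $fh, '>', '/tmp/null-demo.csv' or die "open: $!";
# Unquoted \N marks SQL NULL for LOAD DATA; quoting it would make it literal.
print {$fh} qq{1,\\N,"testing..."\n};
close $fh;

# Loading it back restores the NULL, e.g.:
#   LOAD DATA LOCAL INFILE '/tmp/null-demo.csv' INTO TABLE pt_2410.test
#   COLUMNS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '"';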
117  t/pt-config-diff/long_vars.t  Normal file
@@ -0,0 +1,117 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-config-diff";

require VersionParser;
my $dp  = new DSNParser(opts=>$dsn_opts);
my $sb  = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('source');

my ($ver, $reset, $set_short, $set_long);

if ( !$dbh ) {
   plan skip_all => 'Cannot connect to sandbox source';
}

if ( $sandbox_version ge '5.7' ) {
   $reset = q{SET GLOBAL session_track_system_variables = ''};
   $set_short =
      q{SET GLOBAL session_track_system_variables = '}.
      q{activate_all_roles_on_login,admin_address,}.
      q{admin_port,admin_ssl_ca,admin_ssl_capath,admin_ssl_cert,}.
      q{admin_ssl_cipher,admin_ssl_crl,admin_ssl_crlpath,admin_ssl_key,}.
      q{admin_tls_ciphersuites,admin_tls_version,authentication_policy,}.
      q{auto_generate_certs,auto_increment_increment,}.
      q{auto_increment_offset,autocommit,automatic_sp_privileges,}.
      q{back_log,basedir,big_tables,bind_address,binlog_cache_size,}.
      q{binlog_checksum,binlog_ddl_skip_rewrite,}.
      q{binlog_direct_non_transactional_updates,binlog_encryption,}.
      q{binlog_error_action,binlog_expire_logs_auto_purge,}.
      q{binlog_expire_logs_seconds,binlog_format,binlog_group_commit_sync_delay,}.
      q{binlog_group_commit_sync_no_delay_count,binlog_gtid_simple_recovery,}.
      q{binlog_max_flush_queue_time,binlog_order_commits,}.
      q{binlog_rotate_encryption_master_key_at_startup,binlog_row_event_max_size,}.
      q{binlog_row_image,binlog_row_metadata,binlog_row_value_options,}.
      q{binlog_rows_query_log_events,binlog_skip_flush_commands,}.
      q{binlog_space_limit,binlog_stmt_cache_size'};
   $set_long =
      q{SET GLOBAL session_track_system_variables = '}.
      q{activate_all_roles_on_login,admin_address,}.
      q{admin_port,admin_ssl_ca,admin_ssl_capath,admin_ssl_cert,}.
      q{admin_ssl_cipher,admin_ssl_crl,admin_ssl_crlpath,admin_ssl_key,}.
      q{admin_tls_ciphersuites,admin_tls_version,authentication_policy,}.
      q{auto_generate_certs,auto_increment_increment,}.
      q{auto_increment_offset,autocommit,automatic_sp_privileges,}.
      q{back_log,basedir,big_tables,bind_address,binlog_cache_size,}.
      q{binlog_checksum,binlog_ddl_skip_rewrite,}.
      q{binlog_direct_non_transactional_updates,binlog_encryption,}.
      q{binlog_error_action,binlog_expire_logs_auto_purge,}.
      q{binlog_expire_logs_seconds,binlog_format,binlog_group_commit_sync_delay,}.
      q{binlog_group_commit_sync_no_delay_count,binlog_gtid_simple_recovery,}.
      q{binlog_max_flush_queue_time,binlog_order_commits,}.
      q{binlog_rotate_encryption_master_key_at_startup,binlog_row_event_max_size,}.
      q{binlog_row_image,binlog_row_metadata,binlog_row_value_options,}.
      q{binlog_rows_query_log_events,binlog_skip_flush_commands,}.
      q{binlog_space_limit,binlog_stmt_cache_size,}.
      q{binlog_transaction_compression,binlog_transaction_compression_level_zstd'};
}
else {
   plan skip_all => "Requires MySQL 5.7 or newer";
}

my $output;
my $retval;

$sb->do_as_root('source', $set_short);

$output = output(
   sub { $retval = pt_config_diff::main(
      "${trunk}/t/pt-config-diff/samples/long_vars_1.cnf",
      'h=127.1,P=12345,u=msandbox,p=msandbox')
   },
   stderr => 1,
);

is(
   $retval,
   0,
   "No diff on variable value up to 1024 bytes long"
);

$sb->do_as_root('source', $set_long);

$output = output(
   sub { $retval = pt_config_diff::main(
      "${trunk}/t/pt-config-diff/samples/long_vars_2.cnf",
      'h=127.1,P=12345,u=msandbox,p=msandbox')
   },
   stderr => 1,
);

is(
   $retval,
   0,
   "No diff on variable value longer than 1024 bytes"
);

# #############################################################################
# Done.
# #############################################################################
$sb->do_as_root('source', $reset);

$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
2  t/pt-config-diff/samples/long_vars_1.cnf  Normal file
@@ -0,0 +1,2 @@
[mysqld]
session_track_system_variables = 'activate_all_roles_on_login,admin_address,admin_port,admin_ssl_ca,admin_ssl_capath,admin_ssl_cert,admin_ssl_cipher,admin_ssl_crl,admin_ssl_crlpath,admin_ssl_key,admin_tls_ciphersuites,admin_tls_version,authentication_policy,auto_generate_certs,auto_increment_increment,auto_increment_offset,autocommit,automatic_sp_privileges,back_log,basedir,big_tables,bind_address,binlog_cache_size,binlog_checksum,binlog_ddl_skip_rewrite,binlog_direct_non_transactional_updates,binlog_encryption,binlog_error_action,binlog_expire_logs_auto_purge,binlog_expire_logs_seconds,binlog_format,binlog_group_commit_sync_delay,binlog_group_commit_sync_no_delay_count,binlog_gtid_simple_recovery,binlog_max_flush_queue_time,binlog_order_commits,binlog_rotate_encryption_master_key_at_startup,binlog_row_event_max_size,binlog_row_image,binlog_row_metadata,binlog_row_value_options,binlog_rows_query_log_events,binlog_skip_flush_commands,binlog_space_limit,binlog_stmt_cache_size'
2  t/pt-config-diff/samples/long_vars_2.cnf  Normal file
@@ -0,0 +1,2 @@
[mysqld]
session_track_system_variables = 'activate_all_roles_on_login,admin_address,admin_port,admin_ssl_ca,admin_ssl_capath,admin_ssl_cert,admin_ssl_cipher,admin_ssl_crl,admin_ssl_crlpath,admin_ssl_key,admin_tls_ciphersuites,admin_tls_version,authentication_policy,auto_generate_certs,auto_increment_increment,auto_increment_offset,autocommit,automatic_sp_privileges,back_log,basedir,big_tables,bind_address,binlog_cache_size,binlog_checksum,binlog_ddl_skip_rewrite,binlog_direct_non_transactional_updates,binlog_encryption,binlog_error_action,binlog_expire_logs_auto_purge,binlog_expire_logs_seconds,binlog_format,binlog_group_commit_sync_delay,binlog_group_commit_sync_no_delay_count,binlog_gtid_simple_recovery,binlog_max_flush_queue_time,binlog_order_commits,binlog_rotate_encryption_master_key_at_startup,binlog_row_event_max_size,binlog_row_image,binlog_row_metadata,binlog_row_value_options,binlog_rows_query_log_events,binlog_skip_flush_commands,binlog_space_limit,binlog_stmt_cache_size,binlog_transaction_compression,binlog_transaction_compression_level_zstd'
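The long_vars.t test and its two samples pin down a boundary at exactly 1024 bytes: session_track_system_variables easily grows past that size, and the test names imply pt-config-diff previously misbehaved on such values. A sketch of the failure mode, assuming a 1024-byte truncation was the underlying cause (the values here are synthetic):

# Sketch only: why comparing values truncated at 1024 bytes breaks the diff.
# The truncation cause is an assumption; only the 1024 boundary is from the test.
use strict;
use warnings;

my $server_value = join ',', ('some_system_variable') x 100;   # well over 1024 bytes
my $config_value = $server_value . ',one_more_variable';       # genuinely different

# A truncating comparison wrongly calls the values equal:
my $naive = substr($server_value, 0, 1024) eq substr($config_value, 0, 1024);
# A full comparison, which the test expects, sees the difference:
my $full  = $server_value eq $config_value;

printf "truncated: %d, full: %d\n", ($naive ? 1 : 0), ($full ? 1 : 0);   # truncated: 1, full: 0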
@@ -137,7 +137,7 @@ set_delay();
# We need to sleep, otherwise pt-osc can finish before replica is delayed
sleep($max_lag);

-my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'engine=INNODB' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";
+my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'ADD COLUMN foo varchar(32)' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";

$output = run_broken_job($args);

@@ -165,7 +165,7 @@ my @args = (qw(--execute --chunk-size=10 --history));

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-      '--alter', 'engine=INNODB', '--execute', "--resume=${job_id}",
+      '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
      '--chunk-index=f2'
   ) }
);
@@ -186,7 +186,7 @@ like(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
      '--max-lag', $max_lag,
      '--resume', $job_id,
-      '--alter', 'engine=INNODB',
+      '--alter', 'ADD COLUMN foo varchar(32)',
      '--plugin', "$plugin/pt-1717.pm",
   ),
   },
@@ -208,8 +208,10 @@ ok(
   'All rows copied correctly'
) or diag("New table checksum: '${new_table_checksum}', original content checksum: '${old_table_checksum}'");

+diag(`/tmp/12345/use test -N -e "ALTER TABLE pt1717 DROP COLUMN foo"`);
+
# Tests for chunk-index and chunk-index-columns options
-$args = "$source_dsn,D=test,t=pt1717 --alter engine=innodb --execute --history --chunk-size=10 --no-drop-new-table --no-drop-triggers --reverse-triggers --chunk-index=f2";
+$args = "$source_dsn,D=test,t=pt1717 --alter 'ADD COLUMN foo varchar(32)' --execute --history --chunk-size=10 --no-drop-new-table --no-drop-triggers --reverse-triggers --chunk-index=f2";

set_delay();
$output = run_broken_job($args);
@@ -220,7 +222,7 @@ $job_id = $1;

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-      '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+      '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
   ) }
);

@@ -238,7 +240,7 @@ like(

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-      '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+      '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
      '--chunk-index=f1'
   ) }
);
@@ -257,7 +259,7 @@ like(

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-      '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+      '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
      '--chunk-index=f2', '--chunk-index-columns=1'
   ) }
);
@@ -288,7 +290,7 @@ is(
   $output + 0,
   3,
   'Triggers were not dropped'
-);
+) or diag($output);

$output = `/tmp/12345/use -N -e "select count(*) from information_schema.triggers where TRIGGER_SCHEMA='test' AND EVENT_OBJECT_TABLE like '%pt1717%_new' AND trigger_name LIKE 'rt_%'"`;

@@ -300,7 +302,7 @@ is(

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-      '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+      '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
      '--chunk-size=4',
      '--chunk-index=f2'
   ) }
@@ -348,7 +350,7 @@ ok(

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-      '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+      '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
      '--chunk-size=4',
      '--chunk-index=f2'
   ) }
@@ -372,7 +374,7 @@ $output =~ /New table `test`.`([_]+pt1717_new)` not found, restart operation fro

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$source_dsn,D=test,t=pt1717",
-      '--alter', 'engine=innodb', '--execute', "--resume=${job_id}",
+      '--alter', 'ADD COLUMN foo varchar(32)', '--execute', "--resume=${job_id}",
      '--chunk-size=4',
      '--chunk-index=f2'
   ) }

239  t/pt-online-schema-change/pt-2355.t  Normal file
@@ -0,0 +1,239 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use threads;

use English qw(-no_match_vars);
use Test::More;

use Data::Dumper;
use PerconaTest;
use Sandbox;
use SqlModes;
use File::Temp qw/ tempdir tempfile /;

our $delay = 10;
my $max_lag = $delay / 2;
my $output;
my $exit;

my $tmp_file = File::Temp->new();
my $tmp_file_name = $tmp_file->filename;
unlink $tmp_file_name;

require "$trunk/bin/pt-online-schema-change";

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
if ($sb->is_cluster_mode) {
   plan skip_all => 'Not for PXC';
}

my $source_dbh   = $sb->get_dbh_for('source');
my $replica_dbh1 = $sb->get_dbh_for('replica1');
my $replica_dbh2 = $sb->get_dbh_for('replica2');
my $source_dsn   = 'h=127.0.0.1,P=12345,u=msandbox,p=msandbox';
my $replica_dsn1 = 'h=127.0.0.1,P=12346,u=msandbox,p=msandbox';
my $replica_dsn2 = 'h=127.0.0.1,P=12347,u=msandbox,p=msandbox';
my $sample = "t/pt-online-schema-change/samples";
my $plugin = "$trunk/$sample/plugins";

# We need sync_relay_log=1 to keep changes after replica restart
my $cnf = '/tmp/12347/my.sandbox.cnf';
diag(`cp $cnf $cnf.bak`);
diag(`echo "[mysqld]" > /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "sync_relay_log=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "sync_relay_log_info=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "relay_log_recovery=1" >> /tmp/12347/my.sandbox.2.cnf`);
diag(`echo "!include /tmp/12347/my.sandbox.2.cnf" >> $cnf`);
diag(`/tmp/12347/stop >/dev/null`);
sleep 1;
diag(`/tmp/12347/start >/dev/null`);

sub reset_query_cache {
   my @dbhs = @_;
   return if ($sandbox_version ge '8.0');
   foreach my $dbh (@dbhs) {
      $dbh->do('RESET QUERY CACHE');
   }
}

sub run_broken_job {
   my ($args) = @_;
   my ($fh, $filename) = tempfile();
   my $pid = fork();

   if (!$pid) {
      open(STDERR, '>', $filename);
      open(STDOUT, '>', $filename);
      exec("$trunk/bin/pt-online-schema-change $args");
   }

   sleep($max_lag + $max_lag/2);
   # stop replica 12347
   diag(`/tmp/12347/stop >/dev/null`);
   sleep 1;

   waitpid($pid, 0);
   my $output = do {
      local $/ = undef;
      <$fh>;
   };

   return $output;
}

sub set_delay {
   $sb->wait_for_replicas();

   diag("Setting replica delay to $delay seconds");
   diag(`/tmp/12345/use -N test -e "DROP TABLE IF EXISTS pt1717_back"`);

   $replica_dbh1->do("STOP ${replica_name}");
   $replica_dbh1->do("CHANGE ${source_change} TO ${source_name}_DELAY=$delay");
   $replica_dbh1->do("START ${replica_name}");

   # Run a full table scan query to ensure the replica is behind the source
   # There is no query cache in MySQL 8.0+
   reset_query_cache($source_dbh, $source_dbh);
   # Update one row so replica is delayed
   $source_dbh->do('UPDATE `test`.`pt1717` SET f2 = f2 + 1 LIMIT 1');
   $source_dbh->do('UPDATE `test`.`pt1717` SET f2 = f2 + 1 WHERE f1 = ""');

   # Creating copy of table pt1717, so we can compare data later
   diag(`/tmp/12345/use -N test -e "CREATE TABLE pt1717_back like pt1717"`);
   diag(`/tmp/12345/use -N test -e "INSERT INTO pt1717_back SELECT * FROM pt1717"`);
}

# 1) Set the replica delay to 0 just in case we are re-running the tests without restarting the sandbox.
# 2) Load sample data
# 3) Set the replica delay to $delay seconds to be able to see the 'waiting' message.
diag("Setting replica delay to 0 seconds");
$replica_dbh1->do("STOP ${replica_name}");
$source_dbh->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${replica_name}");
$replica_dbh1->do("START ${replica_name}");

diag('Loading test data');
$sb->load_file('source', "t/pt-online-schema-change/samples/pt-1717.sql");

# Should be greater than chunk-size and big enough, so pt-osc will wait for delay
my $num_rows = 5000;
my $chunk_size = 10;
diag("Loading $num_rows into the table. This might take some time.");
diag(`util/mysql_random_data_load --host=127.0.0.1 --port=12345 --user=msandbox --password=msandbox test pt1717 $num_rows`);

diag("Starting tests...");

set_delay();

# We need to sleep, otherwise pt-osc can finish before replica is delayed
sleep($max_lag);

my $args = "$source_dsn,D=test,t=pt1717 --execute --chunk-size ${chunk_size} --max-lag $max_lag --alter 'ADD INDEX idx1(f1)' --pid $tmp_file_name --progress time,5 --no-drop-new-table --no-drop-triggers --history";

$output = run_broken_job($args);

like(
   $output,
   qr/`test`.`pt1717` was not altered/s,
   "pt-osc stopped with error as expected",
) or diag($output);

diag(`/tmp/12347/start >/dev/null`);
$sb->wait_for_replicas();

$output = `/tmp/12345/use -N -e "select job_id, upper_boundary from percona.pt_osc_history"`;
my ($job_id, $upper_boundary) = split(/\s+/, $output);

my $copied_rows = `/tmp/12345/use -N -e "select count(*) from test._pt1717_new"`;
chomp($copied_rows);

ok(
   $copied_rows eq $upper_boundary,
   'Upper chunk boundary stored correctly'
) or diag("Copied_rows: ${copied_rows}, upper boundary: ${upper_boundary}");

($output, $exit) = full_output(
   sub { pt_online_schema_change::main("$source_dsn,D=test,t=pt1717",
      "--execute", "--chunk-size=${chunk_size}", "--max-lag=${max_lag}",
      "--alter=ADD INDEX idx1(f1)",
      "--resume=${job_id}",
   ) }
);

is(
   $exit,
   0,
   'pt-osc works correctly with --resume'
) or diag($exit);

like(
   $output,
   qr/Successfully altered/,
   'Success message printed'
) or diag($output);

# Corrupting job record, so we can test error message
diag(`/tmp/12345/use -N -e "update percona.pt_osc_history set new_table_name=NULL where job_id=${job_id}"`);

($output, $exit) = full_output(
   sub { pt_online_schema_change::main("$source_dsn,D=test,t=pt1717",
      "--execute", "--chunk-size=${chunk_size}", "--max-lag=${max_lag}",
      "--alter=ADD INDEX idx1(f1)",
      "--resume=${job_id}",
   ) }
);

is(
   $exit,
   17,
   'pt-osc correctly fails with empty boundaries'
) or diag($exit);

like(
   $output,
   qr/Option --resume refers job \d+ with empty boundaries. Exiting./,
   'Correct error message printed'
) or diag($output);

unlike(
   $output,
   qr/Option --resume refers non-existing job ID: \d+. Exiting./,
   'Misleading error message not printed'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################
diag("Cleaning");
$replica_dbh2 = $sb->get_dbh_for('replica2');
diag("Setting replica delay to 0 seconds");
$replica_dbh1->do("STOP ${replica_name}");
$replica_dbh2->do("STOP ${replica_name}");
$source_dbh->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${source_reset}");
$replica_dbh1->do("RESET ${replica_name}");
$replica_dbh2->do("RESET ${replica_name}");
$replica_dbh1->do("START ${replica_name}");
$replica_dbh2->do("START ${replica_name}");

diag(`mv $cnf.bak $cnf`);

diag(`/tmp/12347/stop >/dev/null`);
diag(`/tmp/12347/start >/dev/null`);

diag("Dropping test database");
$source_dbh->do("DROP DATABASE IF EXISTS test");
$sb->wait_for_replicas();

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
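pt-2355.t drives the whole --resume path: it breaks a copy mid-run by stopping the delayed replica, then restarts from the chunk boundary recorded in percona.pt_osc_history. A hedged DBI sketch of reading that record before resuming; the connection details mirror the sandbox, but the query itself is illustrative rather than code from the tool:

# Sketch only: inspect the job record pt-osc would resume from.
use strict;
use warnings;
use DBI;

my $dbh = DBI->connect(
   'DBI:mysql:host=127.0.0.1;port=12345', 'msandbox', 'msandbox',
   { RaiseError => 1 },
);

my ($job_id, $upper_boundary, $new_table) = $dbh->selectrow_array(
   'SELECT job_id, upper_boundary, new_table_name
      FROM percona.pt_osc_history
     ORDER BY job_id DESC LIMIT 1'
);

# A record with empty boundaries cannot be resumed: pt-osc exits with code 17
# and "Option --resume refers job ... with empty boundaries", as tested above.
printf "resume job %s from boundary %s into %s\n",
   $job_id, $upper_boundary // 'NULL', $new_table // 'NULL';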
71  t/pt-online-schema-change/pt-2407.t  Normal file
@@ -0,0 +1,71 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-online-schema-change";
require VersionParser;

use Data::Dumper;

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $source_dbh  = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');

if ( !$source_dbh ) {
   plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$replica_dbh ) {
   plan skip_all => 'Cannot connect to sandbox replica';
}

my @args   = qw(--set-vars innodb_lock_wait_timeout=3);
my $output = "";
my $dsn    = "h=127.1,P=12345,u=msandbox,p=msandbox";
my $exit   = 0;
my $sample = "t/pt-online-schema-change/samples";

$sb->load_file('source', "$sample/pt-2407.sql");

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$dsn,D=pt_2407,t=t1",
      '--alter', 'alter table t1 ADD COLUMN payout_group_id VARCHAR(255) DEFAULT NULL, ALGORITHM=INSTANT;', '--execute') }
);

is(
   $exit,
   11,
   'Return code non-zero for failed operation'
) or diag($exit);

like(
   $output,
   qr/You have an error in your SQL syntax/,
   'Job failed due to SQL syntax error'
) or diag($output);

like(
   $output,
   qr/Error altering new table/,
   'Error altering new table message printed'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
#
done_testing;
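pt-2407.t documents a user error rather than a tool bug: --alter takes only the clause part of the statement, because pt-online-schema-change builds its own ALTER TABLE statement for the new table, so passing a complete statement (with the leading "alter table t1" and a trailing semicolon) produces exactly the SQL syntax error the test asserts. The invocation below is the form that would be expected to succeed; it mirrors the test's own style and assumes the same test-file context (the require of bin/pt-online-schema-change), but it is an illustration, not part of the file:

# Sketch only: the working counterpart of the failing --alter above.
pt_online_schema_change::main(
   '--set-vars', 'innodb_lock_wait_timeout=3',
   'h=127.1,P=12345,u=msandbox,p=msandbox,D=pt_2407,t=t1',
   '--alter', 'ADD COLUMN payout_group_id VARCHAR(255) DEFAULT NULL',
   '--execute',
);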
120  t/pt-online-schema-change/pt-2422.t  Normal file
@@ -0,0 +1,120 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-online-schema-change";
require VersionParser;

use Data::Dumper;

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $source_dbh  = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');

if ( !$source_dbh ) {
   plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$replica_dbh ) {
   plan skip_all => 'Cannot connect to sandbox replica';
}

my @args   = qw(--set-vars innodb_lock_wait_timeout=3);
my $output = "";
my $dsn    = "h=127.1,P=12345,u=msandbox,p=msandbox";
my $exit   = 0;
my $sample = "t/pt-online-schema-change/samples";

$sb->load_file('source', "$sample/basic_no_fks_innodb.sql");
$source_dbh->do('CREATE TABLE pt_osc.pt_2422 LIKE pt_osc.t');
$source_dbh->do('INSERT INTO pt_osc.pt_2422 SELECT * FROM pt_osc.t');

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$dsn,D=pt_osc,t=t",
      '--alter', 'engine=innodb', '--execute', '--history') }
);

is(
   $exit,
   0,
   'basic test with option --history finished OK'
) or diag($output);

like(
   $output,
   qr/Job \d started/,
   'Job id printed at the beginning of the tool output'
);

like(
   $output,
   qr/Job \d finished successfully/,
   'Job id printed for successful copy'
);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=1"`;

like(
   $output,
   qr/_t_new/,
   'Correct new table name inserted'
) or diag($output);

($output, $exit) = full_output(
   sub { pt_online_schema_change::main(@args, "$dsn,D=pt_osc,t=pt_2422",
      '--alter', 'engine=innodb', '--execute', '--history') }
);

is(
   $exit,
   0,
   'basic test with second table and option --history finished OK'
) or diag($output);

like(
   $output,
   qr/Job \d started/,
   'Job id printed at the beginning of the tool output for the second table'
);

like(
   $output,
   qr/Job \d finished successfully/,
   'Job id printed for successful copy of the second table'
);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=1"`;

like(
   $output,
   qr/_t_new/,
   'New table name for previous job was not updated'
) or diag($output);

$output = `/tmp/12345/use -N -e "SELECT new_table_name FROM percona.pt_osc_history WHERE job_id=2"`;

like(
   $output,
   qr/_pt_2422_new/,
   'Correct new table name inserted for the second table'
) or diag($output);

# #############################################################################
# Done.
# #############################################################################

$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
#
done_testing;
12  t/pt-online-schema-change/samples/pt-2407.sql  Normal file
@@ -0,0 +1,12 @@
CREATE DATABASE pt_2407;

USE pt_2407;

CREATE TABLE t1 (
  c1 int NOT NULL,
  c2 varchar(100) NOT NULL,
  PRIMARY KEY (c1),
  KEY idx (c2)
) ENGINE=InnoDB;

INSERT INTO t1 VALUES(1,1),(2,2),(3,3),(4,4),(5,5);
@@ -17,12 +17,6 @@ use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-query-digest";

-ok (1,
-   "version checking site offline for now"
-);
-done_testing;
-exit 0;
-
my $output;
my $cmd = "$trunk/bin/pt-query-digest --limit 1 $trunk/t/lib/samples/slowlogs/slow001.txt";

@@ -49,7 +43,7 @@ ok(

like(
   $output,
-   qr/# Query 1: 0 QPS, 0x concurrency, ID 0x7F7D57ACDD8A346E at byte 0/,
+   qr/# Query 1: 0 QPS, 0x concurrency, ID 0xA853B50CDEB4866B3A99CC42AEDCCFCD at byte 359/,
   "Tool ran after version-check"
) or diag(Dumper($output));

@@ -172,6 +166,23 @@ ok(

unlink "/tmp/pt-query-digest.$PID" if "/tmp/pt-query-digest.$PID";

+# #############################################################################
+# PT-2129 - tools fail on non-readable version check file
+# #############################################################################
+
+system("touch $vc_file");
+chmod 0000, $vc_file;
+
+$output = `$cmd --version-check 2>&1`;
+
+unlike(
+   $output,
+   qr/Can't use an undefined value as an ARRAY reference/,
+   'No undefined value error'
+) or diag($output);
+
+chmod 0664, $vc_file;
+
# #############################################################################
# Done.
# #############################################################################

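The PT-2129 block above asserts only the symptom: an unreadable version-check file must not surface as "Can't use an undefined value as an ARRAY reference". The defensive pattern it implies is sketched below; the file path is hypothetical and the real handling lives in the toolkit's version-check code, so this is an assumption about shape, not the shipped fix:

# Sketch only: degrade gracefully when the version-check history file
# cannot be read, instead of later dereferencing undef.
my $vc_file = '/tmp/percona-version-check';   # hypothetical path
my @instances = ();
if ( open my $fh, '<', $vc_file ) {
   @instances = <$fh>;
   close $fh;
}
# An unreadable file now just means "no cached instances", which matches
# the unlike() assertion in the test above.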
@@ -378,6 +378,29 @@ is(
diag(`/tmp/12346/stop >/dev/null`);
diag(`/tmp/12346/start >/dev/null`);

+# #############################################################################
+# typo in pt-table-checksum error message
+# https://perconadev.atlassian.net/browse/PT-2424
+# #############################################################################
+
+$output = output(sub {
+      pt_table_checksum::main($source_dsn,
+         qw(--no-empty-replicate-table --truncate-replicate-table)
+      )},
+   stderr => 1,
+);
+
+unlike(
+   $output,
+   qr/--resume and --no-empty-replicate-table are mutually exclusive/,
+   "PT-2424: no typo in the error message"
+);
+
+like(
+   $output,
+   qr/--truncate-replicate-table and --no-empty-replicate-table are mutually exclusive/,
+   "PT-2424: correct error message"
+);

#
# #############################################################################