Merge 2.0 r187.

Daniel Nichter
2012-02-21 14:26:44 -07:00
39 changed files with 872 additions and 693 deletions

View File

@@ -8,12 +8,12 @@ v2.0.3 released 2012-02-03
* Fixed bug 871438: Bash tools are insecure
* Fixed bug 897758: Failed to prepare TableSyncChunk plugin: Use of uninitialized value $args{"chunk_range"} in lc at pt-table-sync line 3055
* Fixed bug 919819: pt-kill --execute-command creates zombies
* Fixed bug 894255: pt-kill: when --daemonize is given, should not check that stdin is a tty
* Fixed bug 925778: pt-ioprofile doesn't run without a file
* Fixed bug 925477: pt-ioprofile docs refer to pt-iostats
* Fixed bug 857091: pt-sift downloads http://percona.com/get/pt-pmp, which does not work
* Fixed bug 857104: pt-sift tries to invoke mext, should be pt-mext
* Fixed bug 872699: pt-diskstats: rd_avkb & wr_avkb derived incorrectly
* Fixed bug 897029: pt-diskstats computes wrong values for md0
* Fixed bug 882918: pt-stalk spams warning if oprofile isn't installed
* Fixed bug 884504: pt-stalk doesn't check pt-collect
* Fixed bug 897483: pt-online-schema-change "uninitialized value" due to update-foreign-keys-method

View File

@@ -4556,8 +4556,8 @@ rows. Specifying the index with the 'i' part of the L<"--source"> argument can
be crucial for this; use L<"--dry-run"> to examine the generated queries and be
sure to EXPLAIN them to see if they are efficient (most of the time you probably
want to scan the PRIMARY key, which is the default). Even better, profile
pt-archiver with pt-query-profiler and make sure it is not scanning the whole
table every query.
pt-archiver with mk-query-profiler (L<http://maatkit.org/get/mk-query-profiler>)
and make sure it is not scanning the whole table every query.
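For example, a minimal dry run might look like the following (the DSN, table,
and WHERE clause are illustrative, not taken from this change):

  pt-archiver --source h=localhost,D=sakila,t=payment,i=PRIMARY \
    --where "payment_date < NOW() - INTERVAL 1 YEAR" --purge --dry-run

The SELECT statements it prints can then be pasted into the mysql client with
EXPLAIN prepended to confirm they use the intended index.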
You can disable the seek-then-scan optimizations partially or wholly with
L<"--no-ascend"> and L<"--ascend-first">. Sometimes this may be more efficient

View File

@@ -4085,7 +4085,7 @@ Print DROP KEY statement for each duplicate key. By default an ALTER TABLE
DROP KEY statement is printed below each duplicate key so that, if you want to
remove the duplicate key, you can copy-paste the statement into MySQL.
To disable printing these statements, specify --nosql.
To disable printing these statements, specify C<--no-sql>.
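For example, the printed statement for a hypothetical duplicate key named
C<idx_dupe> would look roughly like:

  ALTER TABLE `db`.`tbl` DROP KEY `idx_dupe`;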
=item --[no]summary

View File

@@ -959,7 +959,7 @@ sub _parse_size {
$opt->{value} = ($pre || '') . $num;
}
else {
$self->save_error("Invalid size for --$opt->{long}");
$self->save_error("Invalid size for --$opt->{long}: $val");
}
return;
}
@@ -1249,12 +1249,14 @@ sub parse_options {
sub as_string {
my ( $self, $dsn, $props ) = @_;
return $dsn unless ref $dsn;
my %allowed = $props ? map { $_=>1 } @$props : ();
my @keys = $props ? @$props : sort keys %$dsn;
return join(',',
map { "$_=" . ($_ eq 'p' ? '...' : $dsn->{$_}) }
grep { defined $dsn->{$_} && $self->{opts}->{$_} }
grep { !$props || $allowed{$_} }
sort keys %$dsn );
grep {
exists $self->{opts}->{$_}
&& exists $dsn->{$_}
&& defined $dsn->{$_}
} @keys);
}
sub usage {
@@ -3543,14 +3545,9 @@ sub main {
unless $o->got('interval');
}
if ( !-t STDIN ) {
PTDEBUG && _d("STDIN is piped");
@ARGV = ('-');
}
# Disable opts that don't make sense when reading proclist
# from a file (or STDIN).
if ( @ARGV ) {
if ( $o->get('test-matching') ) {
$o->set('run-time', 0);
$o->set('interval', 0);
$o->set('ignore-self', 0);
@@ -3580,6 +3577,27 @@ sub main {
return 0;
}
# ########################################################################
# Create the --filter sub.
# ########################################################################
my $filter_sub;
if ( my $filter = $o->get('filter') ) {
if ( -f $filter && -r $filter ) {
PTDEBUG && _d('Reading file', $filter, 'for --filter code');
open my $fh, "<", $filter or die "Cannot open $filter: $OS_ERROR";
$filter = do { local $/ = undef; <$fh> };
close $fh;
}
else {
$filter = "( $filter )"; # issue 565
}
my $code = 'sub { my ( $event ) = @_; '
. "$filter && return \$event; };";
PTDEBUG && _d('--filter code:', $code);
$filter_sub = eval $code
or die "Error compiling --filter code: $code\n$EVAL_ERROR";
}
# ########################################################################
# Make input sub that will either get processlist from MySQL or a file.
# ########################################################################
@@ -3587,13 +3605,14 @@ sub main {
my $dbh;
my $kill_sth;
my $get_proclist;
if ( @ARGV ) {
PTDEBUG && _d('Getting processlist from files:', @ARGV);
my $files;
if ( $files = $o->get('test-matching') ) {
PTDEBUG && _d('Getting processlist from files:', @$files);
my $trp = new TextResultSetParser();
my $fh;
$get_proclist = sub {
if ( !$fh ) {
my $file = shift @ARGV;
my $file = shift @$files;
die 'No more files' unless $file;
if ( $file eq '-' ) {
$fh = *STDIN;
@@ -3655,14 +3674,15 @@ sub main {
# ########################################################################
msg("$PROGRAM_NAME starting");
msg($dbh ? "Connected to host " . $dp->as_string($dsn)
: "Reading files @ARGV");
: "Test matching files @$files");
# Class-based match criteria.
my $query_count = $o->get('query-count');
my $each_busy_time = $o->get('each-busy-time');
my $any_busy_time = $o->get('any-busy-time');
my $group_by = $o->get('group-by');
if ( $group_by ) {
if ( $group_by
&& $group_by =~ m/id|user|host|db|command|time|state|info/i ) {
# Processlist.pm is case-sensitive. It matches Id, Host, db, etc.
# So we'll do the same because if we set NAME_lc on the dbh then
# we'll break our Processlist obj.
@@ -3724,6 +3744,17 @@ sub main {
die "Error getting SHOW PROCESSLIST: $EVAL_ERROR";
}
# Apply --filter to the processlist events.
my $filtered_proclist;
if ( $filter_sub && $proclist && @$proclist ) {
foreach my $proc ( @$proclist ) {
push @$filtered_proclist, $proc if $filter_sub->($proc);
}
}
else {
$filtered_proclist = $proclist;
}
my @queries;
if ( $proclist ) {
# ##################################################################
@@ -3742,27 +3773,26 @@ sub main {
# ##################################################################
CLASS:
foreach my $class ( keys %$query_classes ) {
PTDEBUG && _d("Finding matching queries for class", $class);
PTDEBUG && _d('Finding matching queries in class', $class);
my @matches = $pl->find($query_classes->{$class}, %find_spec);
if ( !@matches ) {
PTDEBUG && _d("Class has no matching queries");
next CLASS;
}
PTDEBUG && _d(scalar @matches, 'queries in class', $class);
next CLASS unless scalar @matches;
# ###############################################################
# Apply class-based filters.
# ###############################################################
if ( $query_count && @matches < $query_count ) {
PTDEBUG && _d("Class does not have enough queries; has",
scalar @matches, "but needs at least", $query_count);
PTDEBUG && _d('Not enough queries in class', $class,
'; has', scalar @matches, 'but needs at least', $query_count);
next CLASS;
}
if ( $each_busy_time ) {
foreach my $proc ( @matches ) {
if ( ($proc->{Time} || 0) <= $each_busy_time ) {
PTDEBUG && _d("This proc hasn't been running long enough:",
Dumper($proc));
PTDEBUG && _d('This query in class', $class,
'hasn\'t been running long enough:', Dumper($proc));
next CLASS;
}
}
@@ -3776,7 +3806,7 @@ sub main {
}
}
if ( !$busy_enough ) {
PTDEBUG && _d("No proc is busy enough");
PTDEBUG && _d('No query is busy enough in class', $class);
next CLASS;
}
}
@@ -3803,8 +3833,9 @@ sub main {
# ###############################################################
# Save matching queries in this class.
# ###############################################################
PTDEBUG && _d(scalar @matches, "queries in class to kill");
PTDEBUG && _d(scalar @matches, 'queries to kill in class', $class);
push @queries, @matches;
} # CLASS
msg('Matched ' . scalar @queries . ' queries');
@@ -3959,7 +3990,7 @@ pt-kill - Kill MySQL queries that match certain criteria.
=head1 SYNOPSIS
Usage: pt-kill [OPTION]... [FILE...]
Usage: pt-kill [OPTIONS]
pt-kill kills MySQL connections. pt-kill connects to MySQL and gets queries
from SHOW PROCESSLIST if no FILE is given. Else, it reads queries from one
@@ -3984,7 +4015,8 @@ Print all login processes:
See which queries in the processlist right now would match:
mysql -e "SHOW PROCESSLIST" | pt-kill --busy-time 60 --print
mysql -e "SHOW PROCESSLIST" > proclist.txt
pt-kill --test-matching proclist.txt --busy-time 60 --print
=head1 RISKS
@@ -4097,8 +4129,7 @@ L<"--any-busy-time"> and L<"--each-busy-time"> are mutually exclusive.
L<"--kill"> and L<"--kill-query"> are mutually exclusive.
This tool accepts additional command-line arguments. Refer to the
L<"SYNOPSIS"> and usage information for details.
L<"--daemonize"> and L<"--test-matching"> are mutually exclusive.
=over
@@ -4134,6 +4165,48 @@ short form: -F; type: string
Only read mysql options from the given file. You must give an absolute
pathname.
=item --filter
type: string
Discard events for which this Perl code doesn't return true.
This option is a string of Perl code or a file containing Perl code that gets
compiled into a subroutine with one argument: $event. This is a hashref.
If the given value is a readable file, then pt-kill reads the entire
file and uses its contents as the code. The file should not contain
a shebang (#!/usr/bin/perl) line.
If the code returns true, the chain of callbacks continues; otherwise it ends.
The code is the last statement in the subroutine other than C<return $event>.
The subroutine template is:
sub { $event = shift; filter && return $event; }
Filters given on the command line are wrapped inside parentheses like
C<( filter )>. For complex, multi-line filters, you must put the code inside
a file so it will not be wrapped inside parentheses. Either way, the filter
must produce syntactically valid code given the template. For example, an
if-else branch given on the command line would not be valid:
--filter 'if () { } else { }' # WRONG
Since it's given on the command line, the if-else branch would be wrapped inside
parentheses, which is not syntactically valid. So accomplishing something more
complex like this requires putting the code in a file, for example
filter.txt:
my $event_ok; if (...) { $event_ok=1; } else { $event_ok=0; } $event_ok
Then specify C<--filter filter.txt> to read the code from filter.txt.
If the filter code won't compile, pt-kill will die with an error.
If the filter code does compile, an error may still occur at runtime if the
code tries to do something wrong (like pattern match an undefined value).
pt-kill does not provide any safeguards, so code carefully!
It is permissible for the code to have side effects (to alter C<$event>).
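For example, a simple one-line filter that keeps only events from a
hypothetical C<appuser> account can be given directly on the command line:

  pt-kill --filter '$event->{User} eq "appuser"' --busy-time 60 --print

The attribute names follow the SHOW PROCESSLIST column names (Id, User, Host,
db, Command, Time, State, Info).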
=item --group-by
type: string
@@ -4503,6 +4576,17 @@ By default, matches do not apply to replication threads; i.e. replication
threads are completely ignored. Specifying this option allows matches to
match (and potentially kill) replication threads on masters and slaves.
=item --test-matching
type: array; group: Query Matches
Files with processlist snapshots to test matching options against. Since
the matching options can be complex, you can save snapshots of the processlist
in files, then test matching options against queries in those files.
This option disables L<"--run-time">, L<"--interval">,
and L<"--[no]ignore-self">.
=back
=head2 CLASS MATCHES

View File

@@ -3066,7 +3066,7 @@ L<"--base-dir">. These session files are played with L<"--play">.
pt-log-player will L<"--play"> session files in parallel using N number of
L<"--threads">. (They're not technically threads, but we call them that
anyway.) Each thread will play all the sessions in its given session files.
The sessions are played as fast as possible--there are no delays--because the
The sessions are played as fast as possible (there are no delays) because the
goal is to stress-test and load-test the server. So be careful using this
script on a production server!

View File

@@ -959,7 +959,7 @@ sub _parse_size {
$opt->{value} = ($pre || '') . $num;
}
else {
$self->save_error("Invalid size for --$opt->{long}");
$self->save_error("Invalid size for --$opt->{long}: $val");
}
return;
}
@@ -1249,12 +1249,14 @@ sub parse_options {
sub as_string {
my ( $self, $dsn, $props ) = @_;
return $dsn unless ref $dsn;
my %allowed = $props ? map { $_=>1 } @$props : ();
my @keys = $props ? @$props : sort keys %$dsn;
return join(',',
map { "$_=" . ($_ eq 'p' ? '...' : $dsn->{$_}) }
grep { defined $dsn->{$_} && $self->{opts}->{$_} }
grep { !$props || $allowed{$_} }
sort keys %$dsn );
grep {
exists $self->{opts}->{$_}
&& exists $dsn->{$_}
&& defined $dsn->{$_}
} @keys);
}
sub usage {
@@ -1729,6 +1731,48 @@ sub join_quote {
return $db ? "$db.$tbl" : $tbl;
}
sub serialize_list {
my ( $self, @args ) = @_;
return unless @args;
return $args[0] if @args == 1 && !defined $args[0];
die "Cannot serialize multiple values with undef/NULL"
if grep { !defined $_ } @args;
return join ',', map { quotemeta } @args;
}
sub deserialize_list {
my ( $self, $string ) = @_;
return $string unless defined $string;
my @escaped_parts = $string =~ /
\G # Start of string, or end of previous match.
( # Each of these is an element in the original list.
[^\\,]* # Anything not a backslash or a comma
(?: # When we get here, we found one of the above.
\\. # A backslash followed by something so we can continue
[^\\,]* # Same as above.
)* # Repeat zero or more times.
)
, # Comma dividing elements
/sxgc;
push @escaped_parts, pos($string) ? substr( $string, pos($string) ) : $string;
my @unescaped_parts = map {
my $part = $_;
my $char_class = utf8::is_utf8($part) # If it's a UTF-8 string,
? qr/(?=\p{ASCII})\W/ # We only care about non-word
: qr/(?=\p{ASCII})\W|[\x{80}-\x{FF}]/; # Otherwise,
$part =~ s/\\($char_class)/$1/g;
$part;
} @escaped_parts;
return @unescaped_parts;
}
1;
}
# ###########################################################################
@@ -2022,19 +2066,56 @@ sub new {
return bless $self, $class;
}
sub get_create_table {
my ( $self, $dbh, $db, $tbl ) = @_;
die "I need a dbh parameter" unless $dbh;
die "I need a db parameter" unless $db;
die "I need a tbl parameter" unless $tbl;
my $q = $self->{Quoter};
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
PTDEBUG && _d($sql);
eval { $dbh->do($sql); };
PTDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
$sql = 'USE ' . $q->quote($db);
PTDEBUG && _d($dbh, $sql);
$dbh->do($sql);
$sql = "SHOW CREATE TABLE " . $q->quote($db, $tbl);
PTDEBUG && _d($sql);
my $href;
eval { $href = $dbh->selectrow_hashref($sql); };
if ( $EVAL_ERROR ) {
PTDEBUG && _d($EVAL_ERROR);
return;
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
PTDEBUG && _d($sql);
$dbh->do($sql);
my ($key) = grep { m/create table/i } keys %$href;
if ( $key ) {
PTDEBUG && _d('This table is a base table');
$href->{$key} =~ s/\b[ ]{2,}/ /g;
$href->{$key} .= "\n";
}
else {
PTDEBUG && _d('This table is a view');
($key) = grep { m/create view/i } keys %$href;
}
return $href->{$key};
}
sub parse {
my ( $self, $ddl, $opts ) = @_;
return unless $ddl;
if ( ref $ddl eq 'ARRAY' ) {
if ( lc $ddl->[0] eq 'table' ) {
$ddl = $ddl->[1];
}
else {
return {
engine => 'VIEW',
};
}
}
if ( $ddl !~ m/CREATE (?:TEMPORARY )?TABLE `/ ) {
die "Cannot parse table definition; is ANSI quoting "
@@ -2341,41 +2422,31 @@ sub remove_auto_increment {
return $ddl;
}
sub remove_secondary_indexes {
my ( $self, $ddl ) = @_;
my $sec_indexes_ddl;
my $tbl_struct = $self->parse($ddl);
if ( ($tbl_struct->{engine} || '') =~ m/InnoDB/i ) {
my $clustered_key = $tbl_struct->{clustered_key};
$clustered_key ||= '';
my @sec_indexes = map {
my $key_def = $_->{ddl};
$key_def =~ s/([\(\)])/\\$1/g;
$ddl =~ s/\s+$key_def//i;
my $key_ddl = "ADD $_->{ddl}";
$key_ddl .= ',' unless $key_ddl =~ m/,$/;
$key_ddl;
sub get_table_status {
my ( $self, $dbh, $db, $like ) = @_;
my $q = $self->{Quoter};
my $sql = "SHOW TABLE STATUS FROM " . $q->quote($db);
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
grep { $_->{name} ne $clustered_key }
values %{$tbl_struct->{keys}};
PTDEBUG && _d('Secondary indexes:', Dumper(\@sec_indexes));
if ( @sec_indexes ) {
$sec_indexes_ddl = join(' ', @sec_indexes);
$sec_indexes_ddl =~ s/,$//;
PTDEBUG && _d($sql, @params);
my $sth = $dbh->prepare($sql);
eval { $sth->execute(@params); };
if ($EVAL_ERROR) {
PTDEBUG && _d($EVAL_ERROR);
return;
}
$ddl =~ s/,(\n\) )/$1/s;
}
else {
PTDEBUG && _d('Not removing secondary indexes from',
$tbl_struct->{engine}, 'table');
}
return $ddl, $sec_indexes_ddl, $tbl_struct;
my @tables = @{$sth->fetchall_arrayref({})};
@tables = map {
my %tbl; # Make a copy with lowercased keys
@tbl{ map { lc $_ } keys %$_ } = values %$_;
$tbl{engine} ||= $tbl{type} || $tbl{comment};
delete $tbl{type};
\%tbl;
} @tables;
return @tables;
}
sub _d {
@@ -2392,311 +2463,6 @@ sub _d {
# End TableParser package
# ###########################################################################
# ###########################################################################
# MySQLDump package
# This package is a copy without comments from the original. The original
# with comments and its test file can be found in the Bazaar repository at,
# lib/MySQLDump.pm
# t/lib/MySQLDump.t
# See https://launchpad.net/percona-toolkit for more information.
# ###########################################################################
{
package MySQLDump;
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
( our $before = <<'EOF') =~ s/^ //gm;
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
EOF
( our $after = <<'EOF') =~ s/^ //gm;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
EOF
sub new {
my ( $class, %args ) = @_;
my $self = {
cache => 0, # Afaik no script uses this cache any longer because
};
return bless $self, $class;
}
sub dump {
my ( $self, $dbh, $quoter, $db, $tbl, $what ) = @_;
if ( $what eq 'table' ) {
my $ddl = $self->get_create_table($dbh, $quoter, $db, $tbl);
return unless $ddl;
if ( $ddl->[0] eq 'table' ) {
return $before
. 'DROP TABLE IF EXISTS ' . $quoter->quote($tbl) . ";\n"
. $ddl->[1] . ";\n";
}
else {
return 'DROP TABLE IF EXISTS ' . $quoter->quote($tbl) . ";\n"
. '/*!50001 DROP VIEW IF EXISTS '
. $quoter->quote($tbl) . "*/;\n/*!50001 "
. $self->get_tmp_table($dbh, $quoter, $db, $tbl) . "*/;\n";
}
}
elsif ( $what eq 'triggers' ) {
my $trgs = $self->get_triggers($dbh, $quoter, $db, $tbl);
if ( $trgs && @$trgs ) {
my $result = $before . "\nDELIMITER ;;\n";
foreach my $trg ( @$trgs ) {
if ( $trg->{sql_mode} ) {
$result .= qq{/*!50003 SET SESSION SQL_MODE='$trg->{sql_mode}' */;;\n};
}
$result .= "/*!50003 CREATE */ ";
if ( $trg->{definer} ) {
my ( $user, $host )
= map { s/'/''/g; "'$_'"; }
split('@', $trg->{definer}, 2);
$result .= "/*!50017 DEFINER=$user\@$host */ ";
}
$result .= sprintf("/*!50003 TRIGGER %s %s %s ON %s\nFOR EACH ROW %s */;;\n\n",
$quoter->quote($trg->{trigger}),
@{$trg}{qw(timing event)},
$quoter->quote($trg->{table}),
$trg->{statement});
}
$result .= "DELIMITER ;\n\n/*!50003 SET SESSION SQL_MODE=\@OLD_SQL_MODE */;\n\n";
return $result;
}
else {
return undef;
}
}
elsif ( $what eq 'view' ) {
my $ddl = $self->get_create_table($dbh, $quoter, $db, $tbl);
return '/*!50001 DROP TABLE IF EXISTS ' . $quoter->quote($tbl) . "*/;\n"
. '/*!50001 DROP VIEW IF EXISTS ' . $quoter->quote($tbl) . "*/;\n"
. '/*!50001 ' . $ddl->[1] . "*/;\n";
}
else {
die "You didn't say what to dump.";
}
}
sub _use_db {
my ( $self, $dbh, $quoter, $new ) = @_;
if ( !$new ) {
PTDEBUG && _d('No new DB to use');
return;
}
my $sql = 'USE ' . $quoter->quote($new);
PTDEBUG && _d($dbh, $sql);
$dbh->do($sql);
return;
}
sub get_create_table {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
if ( !$self->{cache} || !$self->{tables}->{$db}->{$tbl} ) {
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
PTDEBUG && _d($sql);
eval { $dbh->do($sql); };
PTDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
$self->_use_db($dbh, $quoter, $db);
$sql = "SHOW CREATE TABLE " . $quoter->quote($db, $tbl);
PTDEBUG && _d($sql);
my $href;
eval { $href = $dbh->selectrow_hashref($sql); };
if ( $EVAL_ERROR ) {
warn "Failed to $sql. The table may be damaged.\nError: $EVAL_ERROR";
return;
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
PTDEBUG && _d($sql);
$dbh->do($sql);
my ($key) = grep { m/create table/i } keys %$href;
if ( $key ) {
PTDEBUG && _d('This table is a base table');
$self->{tables}->{$db}->{$tbl} = [ 'table', $href->{$key} ];
}
else {
PTDEBUG && _d('This table is a view');
($key) = grep { m/create view/i } keys %$href;
$self->{tables}->{$db}->{$tbl} = [ 'view', $href->{$key} ];
}
}
return $self->{tables}->{$db}->{$tbl};
}
sub get_columns {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
PTDEBUG && _d('Get columns for', $db, $tbl);
if ( !$self->{cache} || !$self->{columns}->{$db}->{$tbl} ) {
$self->_use_db($dbh, $quoter, $db);
my $sql = "SHOW COLUMNS FROM " . $quoter->quote($db, $tbl);
PTDEBUG && _d($sql);
my $cols = $dbh->selectall_arrayref($sql, { Slice => {} });
$self->{columns}->{$db}->{$tbl} = [
map {
my %row;
@row{ map { lc $_ } keys %$_ } = values %$_;
\%row;
} @$cols
];
}
return $self->{columns}->{$db}->{$tbl};
}
sub get_tmp_table {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
my $result = 'CREATE TABLE ' . $quoter->quote($tbl) . " (\n";
$result .= join(",\n",
map { ' ' . $quoter->quote($_->{field}) . ' ' . $_->{type} }
@{$self->get_columns($dbh, $quoter, $db, $tbl)});
$result .= "\n)";
PTDEBUG && _d($result);
return $result;
}
sub get_triggers {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
if ( !$self->{cache} || !$self->{triggers}->{$db} ) {
$self->{triggers}->{$db} = {};
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
PTDEBUG && _d($sql);
eval { $dbh->do($sql); };
PTDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
$sql = "SHOW TRIGGERS FROM " . $quoter->quote($db);
PTDEBUG && _d($sql);
my $sth = $dbh->prepare($sql);
$sth->execute();
if ( $sth->rows ) {
my $trgs = $sth->fetchall_arrayref({});
foreach my $trg (@$trgs) {
my %trg;
@trg{ map { lc $_ } keys %$trg } = values %$trg;
push @{ $self->{triggers}->{$db}->{ $trg{table} } }, \%trg;
}
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
PTDEBUG && _d($sql);
$dbh->do($sql);
}
if ( $tbl ) {
return $self->{triggers}->{$db}->{$tbl};
}
return values %{$self->{triggers}->{$db}};
}
sub get_databases {
my ( $self, $dbh, $quoter, $like ) = @_;
if ( !$self->{cache} || !$self->{databases} || $like ) {
my $sql = 'SHOW DATABASES';
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
my $sth = $dbh->prepare($sql);
PTDEBUG && _d($sql, @params);
$sth->execute( @params );
my @dbs = map { $_->[0] } @{$sth->fetchall_arrayref()};
$self->{databases} = \@dbs unless $like;
return @dbs;
}
return @{$self->{databases}};
}
sub get_table_status {
my ( $self, $dbh, $quoter, $db, $like ) = @_;
if ( !$self->{cache} || !$self->{table_status}->{$db} || $like ) {
my $sql = "SHOW TABLE STATUS FROM " . $quoter->quote($db);
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
PTDEBUG && _d($sql, @params);
my $sth = $dbh->prepare($sql);
$sth->execute(@params);
my @tables = @{$sth->fetchall_arrayref({})};
@tables = map {
my %tbl; # Make a copy with lowercased keys
@tbl{ map { lc $_ } keys %$_ } = values %$_;
$tbl{engine} ||= $tbl{type} || $tbl{comment};
delete $tbl{type};
\%tbl;
} @tables;
$self->{table_status}->{$db} = \@tables unless $like;
return @tables;
}
return @{$self->{table_status}->{$db}};
}
sub get_table_list {
my ( $self, $dbh, $quoter, $db, $like ) = @_;
if ( !$self->{cache} || !$self->{table_list}->{$db} || $like ) {
my $sql = "SHOW /*!50002 FULL*/ TABLES FROM " . $quoter->quote($db);
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
PTDEBUG && _d($sql, @params);
my $sth = $dbh->prepare($sql);
$sth->execute(@params);
my @tables = @{$sth->fetchall_arrayref()};
@tables = map {
my %tbl = (
name => $_->[0],
engine => ($_->[1] || '') eq 'VIEW' ? 'VIEW' : '',
);
\%tbl;
} @tables;
$self->{table_list}->{$db} = \@tables unless $like;
return @tables;
}
return @{$self->{table_list}->{$db}};
}
sub _d {
my ($package, undef, $line) = caller 0;
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
map { defined $_ ? $_ : 'undef' }
@_;
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
}
1;
}
# ###########################################################################
# End MySQLDump package
# ###########################################################################
# ###########################################################################
# TableChunker package
# This package is a copy without comments from the original. The original
@@ -3698,14 +3464,21 @@ sub set_callback {
sub start {
my ( $self, $start ) = @_;
$self->{start} = $self->{last_reported} = $start || time();
$self->{first_report} = 0;
}
sub update {
my ( $self, $callback, $now ) = @_;
my ( $self, $callback, %args ) = @_;
my $jobsize = $self->{jobsize};
$now ||= time();
my $now ||= $args{now} || time;
$self->{iterations}++; # How many updates have happened;
if ( !$self->{first_report} && $args{first_report} ) {
$args{first_report}->();
$self->{first_report} = 1;
}
if ( $self->{report} eq 'time'
&& $self->{interval} > $now - $self->{last_reported}
) {
@@ -3786,13 +3559,13 @@ use constant PTDEBUG => $ENV{PTDEBUG} || 0;
sub new {
my ( $class, %args ) = @_;
my @required_args = qw();
my @required_args = qw(Quoter);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my $self = {
%args,
Quoter => $args{Quoter},
};
return bless $self, $class;
@@ -3822,11 +3595,14 @@ sub _make_triggers {
die "I need a $arg argument" unless $args{$arg};
}
my ($db, $tbl, $tmp_tbl, $chunk_column) = @args{@required_args};
my $q = $self->{Quoter};
my $old_table = "`$db`.`$tbl`";
my $new_table = "`$db`.`$tmp_tbl`";
my $new_values = join(', ', map { "NEW.$_" } @{$args{columns}});
my $columns = join(', ', @{$args{columns}});
$chunk_column = $q->quote($chunk_column);
my $old_table = $q->quote($db, $tbl);
my $new_table = $q->quote($db, $tmp_tbl);
my $new_values = join(', ', map { "NEW.".$q->quote($_) } @{$args{columns}});
my $columns = join(', ', map { $q->quote($_) } @{$args{columns}});
my $delete_trigger = "CREATE TRIGGER mk_osc_del AFTER DELETE ON $old_table "
. "FOR EACH ROW "
@@ -3862,9 +3638,10 @@ sub cleanup {
die "I need a $arg argument" unless $args{$arg};
}
my ($dbh, $db, $msg) = @args{@required_args};
my $q = $self->{Quoter};
foreach my $trigger ( qw(del ins upd) ) {
my $sql = "DROP TRIGGER IF EXISTS `$db`.`mk_osc_$trigger`";
my $sql = "DROP TRIGGER IF EXISTS " . $q->quote($db, "mk_osc_$trigger");
$msg->($sql);
$dbh->do($sql) unless $args{print};
}
@@ -3904,13 +3681,14 @@ use constant PTDEBUG => $ENV{PTDEBUG} || 0;
sub new {
my ( $class, %args ) = @_;
my @required_args = qw(Retry);
my @required_args = qw(Retry Quoter);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my $self = {
%args,
Retry => $args{Retry},
Quoter => $args{Quoter},
};
return bless $self, $class;
@@ -3923,9 +3701,10 @@ sub copy {
die "I need a $arg argument" unless $args{$arg};
}
my ($dbh, $msg, $from_table, $to_table, $chunks) = @args{@required_args};
my $q = $self->{Quoter};
my $pr = $args{Progress};
my $sleep = $args{sleep};
my $columns = join(', ', @{$args{columns}});
my $columns = join(', ', map { $q->quote($_) } @{$args{columns}});
my $n_chunks = @$chunks - 1;
for my $chunkno ( 0..$n_chunks ) {
@@ -3950,24 +3729,23 @@ sub copy {
wait => sub { sleep 1; },
tries => 3,
try => sub {
my ( %args ) = @_;
eval {
$dbh->do($sql);
};
if ( $EVAL_ERROR ) {
PTDEBUG && _d($EVAL_ERROR);
if ( $EVAL_ERROR =~ m/Lock wait timeout exceeded/ ) {
$error = $EVAL_ERROR;
if ( $args{tryno} > 1 ) {
$msg->("Lock wait timeout exceeded; retrying $sql");
}
return;
}
die $EVAL_ERROR;
}
return 1;
},
on_failure => sub { die $error; },
fail => sub {
my (%args) = @_;
my $error = $args{error};
PTDEBUG && _d($error);
if ( $error =~ m/Lock wait timeout exceeded/ ) {
$msg->("Lock wait timeout exceeded; retrying $sql");
return 1; # call wait, call try
}
return 0; # call final_fail
},
final_fail => sub {
my (%args) = @_;
die $args{error};
},
);
}
@@ -4024,48 +3802,42 @@ sub new {
sub retry {
my ( $self, %args ) = @_;
my @required_args = qw(try wait);
my @required_args = qw(try fail final_fail);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
};
my ($try, $wait) = @args{@required_args};
my ($try, $fail, $final_fail) = @args{@required_args};
my $wait = $args{wait} || sub { sleep 1; };
my $tries = $args{tries} || 3;
my $last_error;
my $tryno = 0;
TRY:
while ( ++$tryno <= $tries ) {
PTDEBUG && _d("Retry", $tryno, "of", $tries);
PTDEBUG && _d("Try", $tryno, "of", $tries);
my $result;
eval {
$result = $try->(tryno=>$tryno);
};
if ( defined $result ) {
PTDEBUG && _d("Try code succeeded");
if ( my $on_success = $args{on_success} ) {
PTDEBUG && _d("Calling on_success code");
$on_success->(tryno=>$tryno, result=>$result);
}
return $result;
}
if ( $EVAL_ERROR ) {
PTDEBUG && _d("Try code died:", $EVAL_ERROR);
die $EVAL_ERROR unless $args{retry_on_die};
}
PTDEBUG && _d("Try code failed:", $EVAL_ERROR);
$last_error = $EVAL_ERROR;
if ( $tryno < $tries ) {
PTDEBUG && _d("Try code failed, calling wait code");
if ( $tryno < $tries ) { # more retries
my $retry = $fail->(tryno=>$tryno, error=>$last_error);
last TRY unless $retry;
PTDEBUG && _d("Calling wait code");
$wait->(tryno=>$tryno);
}
}
PTDEBUG && _d("Try code did not succeed");
if ( my $on_failure = $args{on_failure} ) {
PTDEBUG && _d("Calling on_failure code");
$on_failure->();
else {
PTDEBUG && _d("Try code succeeded");
return $result;
}
}
return;
PTDEBUG && _d('Try code did not succeed');
return $final_fail->(error=>$last_error);
}
sub _d {
@@ -4110,7 +3882,6 @@ sub main {
my $vp = new VersionParser();
my $q = new Quoter();
my $tp = new TableParser(Quoter => $q);
my $du = new MySQLDump();
my $chunker = new TableChunker(Quoter => $q, TableParser => $tp);
# ########################################################################
@@ -4225,7 +3996,6 @@ sub main {
Quoter => $q,
TableParser => $tp,
TableChunker => $chunker,
MySQLDump => $du,
VersionParser => $vp,
);
@@ -4233,8 +4003,11 @@ sub main {
# Create the capture-sync and copy-rows plugins. Currently, we just have
# one method for each.
# ########################################################################
my $capture_sync = new OSCCaptureSync();
my $copy_rows = new CopyRowsInsertSelect(Retry => new Retry());
my $capture_sync = new OSCCaptureSync(Quoter => $q);
my $copy_rows = new CopyRowsInsertSelect(
Retry => new Retry(),
Quoter => $q,
);
# More values are added later. These are the minimum need to do --cleanup.
my %plugin_args = (
@@ -4379,7 +4152,7 @@ sub main {
# it manually.
if ( !$o->get('print') ) {
my $tmp_tbl_struct = $tp->parse(
$du->get_create_table($dbh, $q, $db, $tmp_tbl));
$tp->get_create_table($dbh, $db, $tmp_tbl));
@columns = intersection([
$plugin_args{tbl_struct}->{is_col},
@@ -4489,7 +4262,7 @@ sub main {
# ############################################################################
sub check_tables {
my ( %args ) = @_;
my @required_args = qw(dbh db tbl tmp_tbl old_tbl VersionParser Quoter TableParser OptionParser TableChunker MySQLDump);
my @required_args = qw(dbh db tbl tmp_tbl old_tbl VersionParser Quoter TableParser OptionParser TableChunker);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
@@ -4528,8 +4301,7 @@ sub check_tables {
# For now, we require that the old table has an exact-chunkable
# column (i.e. unique single-column).
$tbl_info{tbl_struct} = $tp->parse(
$args{MySQLDump}->get_create_table($dbh, $args{Quoter}, $db, $tbl));
$tbl_info{tbl_struct} = $tp->parse($tp->get_create_table($dbh, $db, $tbl));
my ($exact, @chunkable_cols) = $args{TableChunker}->find_chunk_columns(
tbl_struct => $tbl_info{tbl_struct},
exact => 1,

View File

@@ -4,6 +4,48 @@
# See "COPYRIGHT, LICENSE, AND WARRANTY" at the end of this file for legal
# notices and disclaimers.
# ###########################################################################
# tmpdir package
# This package is a copy without comments from the original. The original
# with comments and its test file can be found in the Bazaar repository at,
# lib/bash/tmpdir.sh
# t/lib/bash/tmpdir.sh
# See https://launchpad.net/percona-toolkit for more information.
# ###########################################################################
set -u
TMPDIR=""
mk_tmpdir() {
local dir="${1:-""}"
if [ -n "$dir" ]; then
if [ ! -d "$dir" ]; then
mkdir "$dir" || die "Cannot make tmpdir $dir"
fi
TMPDIR="$dir"
else
local tool="${0##*/}"
local pid="$$"
TMPDIR=`mktemp -d /tmp/${tool}.${pid}.XXXXX` \
|| die "Cannot make secure tmpdir"
fi
}
rm_tmpdir() {
if [ -n "$TMPDIR" ] && [ -d "$TMPDIR" ]; then
rm -rf "$TMPDIR"
fi
TMPDIR=""
}
# ###########################################################################
# End tmpdir package
# ###########################################################################
set +u
usage() {
if [ "${OPT_ERR}" ]; then
echo "${OPT_ERR}" >&2
@@ -100,7 +142,6 @@ aggregate_stacktrace() {
# The main program to run.
main() {
rm -f /tmp/percona-toolkit
# Get command-line options
for o; do
@@ -155,15 +196,14 @@ main() {
fi
date;
for x in $(seq 1 $OPT_i); do
gdb -ex "set pagination 0" -ex "thread apply all bt" -batch -p $OPT_p >> "${OPT_k:-/tmp/percona-toolkit}"
date +'TS %N.%s %F %T' >> "${OPT_k:-/tmp/percona-toolkit}"
gdb -ex "set pagination 0" -ex "thread apply all bt" -batch -p $OPT_p >> "${OPT_k:-$TMPDIR/percona-toolkit}"
date +'TS %N.%s %F %T' >> "${OPT_k:-$TMPDIR/percona-toolkit}"
sleep $OPT_s
done
fi
if [ $# -eq 0 ]; then
aggregate_stacktrace "${OPT_l}" "${OPT_k:-/tmp/percona-toolkit}"
rm -f /tmp/percona-toolkit
aggregate_stacktrace "${OPT_l}" "${OPT_k:-$TMPDIR/percona-toolkit}"
else
aggregate_stacktrace "${OPT_l}" "$@"
fi
@@ -172,7 +212,9 @@ main() {
# Execute the program if it was not included from another file. This makes it
# possible to include without executing, and thus test.
if [ "$(basename "$0")" = "pt-pmp" ] || [ "$(basename "$0")" = "bash" -a "$_" = "$0" ]; then
mk_tmpdir
main "$@"
rm_tmpdir
fi
# ############################################################################

View File

@@ -14530,7 +14530,7 @@ and see also L<"FINGERPRINTS">.
A report is printed for each L<"--group-by"> value (unless C<--no-report> is
given). Therefore, C<--group-by user,db> means "report on queries with the
same user and report on queries with the same db"--it does not mean "report
same user and report on queries with the same db"; it does not mean "report
on queries with the same user and db." See also L<"OUTPUT">.
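For example, the following (the log file name is illustrative) prints one
report grouped by user followed by a separate report grouped by db:

  pt-query-digest slow.log --group-by user,db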
Every value must have a corresponding value in the same position in
@@ -15236,7 +15236,7 @@ The MySQL time expression is wrapped inside a query like
valid inside this query. For example, do not use UNIX_TIMESTAMP() because
UNIX_TIMESTAMP(UNIX_TIMESTAMP()) returns 0.
Events are assumed to be in chronological--older events at the beginning of
Events are assumed to be in chronological order: older events at the beginning of
the log and newer events at the end of the log. L<"--since"> is strict: it
ignores all queries until one is found that is new enough. Therefore, if
the query events are not consistently timestamped, some may be ignored which

View File

@@ -1477,7 +1477,7 @@ L<"--variable"> column matches the L<"--match"> option. For example, to trigger
when more than 10 processes are in the "statistics" state, use the following
options:
--trigger processlist --variable State \
--function processlist --variable State \
--match statistics --threshold 10
=back

View File

@@ -1771,41 +1771,43 @@ sub get_create_table {
die "I need a tbl parameter" unless $tbl;
my $q = $self->{Quoter};
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
my $new_sql_mode
= '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
PTDEBUG && _d($sql);
eval { $dbh->do($sql); };
my $old_sql_mode = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
PTDEBUG && _d($new_sql_mode);
eval { $dbh->do($new_sql_mode); };
PTDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
$sql = 'USE ' . $q->quote($db);
PTDEBUG && _d($dbh, $sql);
$dbh->do($sql);
my $use_sql = 'USE ' . $q->quote($db);
PTDEBUG && _d($dbh, $use_sql);
$dbh->do($use_sql);
$sql = "SHOW CREATE TABLE " . $q->quote($db, $tbl);
PTDEBUG && _d($sql);
my $show_sql = "SHOW CREATE TABLE " . $q->quote($db, $tbl);
PTDEBUG && _d($show_sql);
my $href;
eval { $href = $dbh->selectrow_hashref($sql); };
eval { $href = $dbh->selectrow_hashref($show_sql); };
if ( $EVAL_ERROR ) {
PTDEBUG && _d($EVAL_ERROR);
PTDEBUG && _d($old_sql_mode);
$dbh->do($old_sql_mode);
return;
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
PTDEBUG && _d($sql);
$dbh->do($sql);
PTDEBUG && _d($old_sql_mode);
$dbh->do($old_sql_mode);
my ($key) = grep { m/create table/i } keys %$href;
if ( $key ) {
PTDEBUG && _d('This table is a base table');
$href->{$key} =~ s/\b[ ]{2,}/ /g;
$href->{$key} .= "\n";
}
else {
PTDEBUG && _d('This table is a view');
($key) = grep { m/create view/i } keys %$href;
my ($key) = grep { m/create (?:table|view)/i } keys %$href;
if ( !$key ) {
die "Error: no 'Create Table' or 'Create View' in result set from "
. "$show_sql: " . Dumper($href);
}
return $href->{$key};

View File

@@ -38,12 +38,12 @@ Changelog
* Fixed bug 871438: Bash tools are insecure
* Fixed bug 897758: Failed to prepare TableSyncChunk plugin: Use of uninitialized value $args{"chunk_range"} in lc at pt-table-sync line 3055
* Fixed bug 919819: pt-kill --execute-command creates zombies
* Fixed bug 894255: pt-kill: when --daemonize is given, should not check that stdin is a tty
* Fixed bug 925778: pt-ioprofile doesn't run without a file
* Fixed bug 925477: pt-ioprofile docs refer to pt-iostats
* Fixed bug 857091: pt-sift downloads http://percona.com/get/pt-pmp, which does not work
* Fixed bug 857104: pt-sift tries to invoke mext, should be pt-mext
* Fixed bug 872699: pt-diskstats: rd_avkb & wr_avkb derived incorrectly
* Fixed bug 897029: pt-diskstats computes wrong values for md0
* Fixed bug 882918: pt-stalk spams warning if oprofile isn't installed
* Fixed bug 884504: pt-stalk doesn't check pt-collect
* Fixed bug 897483: pt-online-schema-change "uninitialized value" due to update-foreign-keys-method

View File

@@ -37,13 +37,14 @@ use constant PTDEBUG => $ENV{PTDEBUG} || 0;
# CopyRowsInsertSelect object
sub new {
my ( $class, %args ) = @_;
my @required_args = qw(Retry);
my @required_args = qw(Retry Quoter);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my $self = {
%args,
Retry => $args{Retry},
Quoter => $args{Quoter},
};
return bless $self, $class;
@@ -56,9 +57,10 @@ sub copy {
die "I need a $arg argument" unless $args{$arg};
}
my ($dbh, $msg, $from_table, $to_table, $chunks) = @args{@required_args};
my $q = $self->{Quoter};
my $pr = $args{Progress};
my $sleep = $args{sleep};
my $columns = join(', ', @{$args{columns}});
my $columns = join(', ', map { $q->quote($_) } @{$args{columns}});
my $n_chunks = @$chunks - 1;
for my $chunkno ( 0..$n_chunks ) {

View File

@@ -37,13 +37,13 @@ use constant PTDEBUG => $ENV{PTDEBUG} || 0;
# OSCCaptureSync object
sub new {
my ( $class, %args ) = @_;
my @required_args = qw();
my @required_args = qw(Quoter);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my $self = {
%args,
Quoter => $args{Quoter},
};
return bless $self, $class;
@@ -73,11 +73,14 @@ sub _make_triggers {
die "I need a $arg argument" unless $args{$arg};
}
my ($db, $tbl, $tmp_tbl, $chunk_column) = @args{@required_args};
my $q = $self->{Quoter};
my $old_table = "`$db`.`$tbl`";
my $new_table = "`$db`.`$tmp_tbl`";
my $new_values = join(', ', map { "NEW.$_" } @{$args{columns}});
my $columns = join(', ', @{$args{columns}});
$chunk_column = $q->quote($chunk_column);
my $old_table = $q->quote($db, $tbl);
my $new_table = $q->quote($db, $tmp_tbl);
my $new_values = join(', ', map { "NEW.".$q->quote($_) } @{$args{columns}});
my $columns = join(', ', map { $q->quote($_) } @{$args{columns}});
my $delete_trigger = "CREATE TRIGGER mk_osc_del AFTER DELETE ON $old_table "
. "FOR EACH ROW "
@@ -113,9 +116,10 @@ sub cleanup {
die "I need a $arg argument" unless $args{$arg};
}
my ($dbh, $db, $msg) = @args{@required_args};
my $q = $self->{Quoter};
foreach my $trigger ( qw(del ins upd) ) {
my $sql = "DROP TRIGGER IF EXISTS `$db`.`mk_osc_$trigger`";
my $sql = "DROP TRIGGER IF EXISTS " . $q->quote($db, "mk_osc_$trigger");
$msg->($sql);
$dbh->do($sql) unless $args{print};
}

View File

@@ -58,43 +58,63 @@ sub get_create_table {
die "I need a tbl parameter" unless $tbl;
my $q = $self->{Quoter};
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
# To ensure a consistent output, we save the current (old) SQL mode,
# then set it to the new SQL mode that we need. When done, even
# if an error occurs, we restore the old SQL mode.
my $new_sql_mode
= '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
PTDEBUG && _d($sql);
eval { $dbh->do($sql); };
my $old_sql_mode = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
# Set new SQL mode.
PTDEBUG && _d($new_sql_mode);
eval { $dbh->do($new_sql_mode); };
PTDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
# Must USE the tbl's db because some bug with SHOW CREATE TABLE on a
# view when the current db isn't the view's db causes MySQL to crash.
$sql = 'USE ' . $q->quote($db);
PTDEBUG && _d($dbh, $sql);
$dbh->do($sql);
my $use_sql = 'USE ' . $q->quote($db);
PTDEBUG && _d($dbh, $use_sql);
$dbh->do($use_sql);
$sql = "SHOW CREATE TABLE " . $q->quote($db, $tbl);
PTDEBUG && _d($sql);
my $show_sql = "SHOW CREATE TABLE " . $q->quote($db, $tbl);
PTDEBUG && _d($show_sql);
my $href;
eval { $href = $dbh->selectrow_hashref($sql); };
eval { $href = $dbh->selectrow_hashref($show_sql); };
if ( $EVAL_ERROR ) {
# TODO: I think we fail silently for tools which may try to call
# this on temp tables, or don't care if the table goes away. We
# should warn $EVAL_ERROR and require callers to eval us and do
# what they want with the warning.
PTDEBUG && _d($EVAL_ERROR);
# Restore old SQL mode.
PTDEBUG && _d($old_sql_mode);
$dbh->do($old_sql_mode);
return;
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
PTDEBUG && _d($sql);
$dbh->do($sql);
# Restore old SQL mode.
PTDEBUG && _d($old_sql_mode);
$dbh->do($old_sql_mode);
my ($key) = grep { m/create table/i } keys %$href;
if ( $key ) {
PTDEBUG && _d('This table is a base table');
$href->{$key} =~ s/\b[ ]{2,}/ /g;
$href->{$key} .= "\n";
}
else {
PTDEBUG && _d('This table is a view');
($key) = grep { m/create view/i } keys %$href;
# SHOW CREATE TABLE has at least 2 columns like:
# mysql> show create table city\G
# *************************** 1. row ***************************
# Table: city
# Create Table: CREATE TABLE `city` (
# `city_id` smallint(5) unsigned NOT NULL AUTO_INCREMENT,
# ...
# We want the second column.
my ($key) = grep { m/create (?:table|view)/i } keys %$href;
if ( !$key ) {
die "Error: no 'Create Table' or 'Create View' in result set from "
. "$show_sql: " . Dumper($href);
}
return $href->{$key};

View File

@@ -17,6 +17,7 @@ use PerconaTest;
use Progress;
use Transformers;
use Retry;
use Quoter;
use CopyRowsInsertSelect;
Transformers->import(qw(secs_to_time));
@@ -38,36 +39,84 @@ elsif ( !@{$dbh->selectcol_arrayref('SHOW DATABASES LIKE "sakila"')} ) {
plan skip_all => "Sandbox master does not have the sakila database";
}
else {
plan tests => 8;
plan tests => 14;
}
my $q = new Quoter();
my $rr = new Retry();
my $osc = new CopyRowsInsertSelect(Retry => $rr);
my $osc = new CopyRowsInsertSelect(Retry => $rr, Quoter => $q);
my $msg = sub { print "$_[0]\n"; };
my $output = "";
my $rows;
$sb->load_file("master", "t/lib/samples/osc/tbl001.sql");
# ###########################################################################
# Copy simple tables.
# ###########################################################################
sub test_copy_table {
my (%args) = @_;
my ($tbl, $col, $expect) = @args{qw(tbl col expect)};
$sb->load_file("master", "t/lib/samples/osc/$tbl");
PerconaTest::wait_for_table($dbh, "osc.t", "id=5");
$dbh->do("USE osc");
$osc->copy(
dbh => $dbh,
from_table => 'osc.t',
to_table => 'osc.__new_t',
columns => [qw(id c)],
columns => ['id', $col],
chunks => ['1=1'],
msg => $msg,
);
my $rows = $dbh->selectall_arrayref("select id, c from __new_t order by id");
$rows = $dbh->selectall_arrayref("select id, `$col` from __new_t order by id");
is_deeply(
$rows,
[ [1, 'a'], [2, 'b'], [3, 'c'], [4, 'd'], [5, 'e'], ],
"One chunk copy"
"$tbl: One chunk copy"
) or print Dumper($rows);
$dbh->do("truncate table osc.__new_t");
$output = output( sub {
ok(
no_diff(
sub {
$osc->copy(
dbh => $dbh,
from_table => 'osc.t',
to_table => 'osc.__new_t',
columns => ['id', $col],
chunks => ['id < 4', 'id >= 4 AND id < 6'],
msg => $msg,
print => 1,
engine_flags => 'LOCK IN SHARE MODE',
);
},
"t/lib/samples/osc/$expect",
stderr => 1,
),
"$tbl: 2 chunk copy"
);
$rows = $dbh->selectall_arrayref("select id, `$col` from __new_t order by id");
is_deeply(
$rows,
[],
"$tbl: print doesn't exec statements"
);
}
test_copy_table(
tbl => "tbl001.sql",
col => "c",
expect => "copyins001.txt",
);
# Sleep callback.
my $sleep_cnt = 0;
$dbh->do("truncate table osc.__new_t");
output( sub {
$osc->copy(
dbh => $dbh,
from_table => 'osc.t',
@@ -75,27 +124,39 @@ $output = output( sub {
columns => [qw(id c)],
chunks => ['id < 4', 'id >= 4 AND id < 6'],
msg => $msg,
print => 1,
engine_flags => 'LOCK IN SHARE MODE',
sleep => sub { $sleep_cnt++; },
);
});
is(
$sleep_cnt,
1,
"Calls sleep callback after each chunk (except last chunk)"
);
eval {
$output = output(sub { $osc->cleanup(); } );
};
ok(
no_diff(
$output,
"t/lib/samples/osc/copyins001.txt",
cmd_output => 1,
),
"Prints 2 SQL statments for the 2 chunks"
!$EVAL_ERROR && !$output,
"cleanup() works but doesn't do anything"
);
$rows = $dbh->selectall_arrayref("select id, c from __new_t order by id");
is_deeply(
$rows,
[],
"Doesn't exec those statements if print is true"
test_copy_table(
tbl => "tbl002.sql",
col => "default",
expect => "copyins002.txt",
);
test_copy_table(
tbl => "tbl003.sql",
col => "space col",
expect => "copyins003.txt",
);
# ###########################################################################
# Copy a larger, more complex sakila table.
# ###########################################################################
$dbh->do('create table osc.city like sakila.city');
$dbh->do('alter table osc.city engine=myisam');
my $chunks = [
@@ -140,35 +201,6 @@ like(
"Reports copy progress if Progress obj given"
);
my $sleep_cnt = 0;
$dbh->do("truncate table osc.__new_t");
output( sub {
$osc->copy(
dbh => $dbh,
from_table => 'osc.t',
to_table => 'osc.__new_t',
columns => [qw(id c)],
chunks => ['id < 4', 'id >= 4 AND id < 6'],
msg => $msg,
sleep => sub { $sleep_cnt++; },
);
});
is(
$sleep_cnt,
1,
"Calls sleep callback after each chunk (except last chunk)"
);
eval {
$output = output(sub { $osc->cleanup(); } );
};
ok(
!$EVAL_ERROR && !$output,
"cleanup() works but doesn't do anything"
);
# #############################################################################
# Done.
# #############################################################################

View File

@@ -14,6 +14,7 @@ use Test::More;
use DSNParser;
use Sandbox;
use PerconaTest;
use Quoter;
use OSCCaptureSync;
use Data::Dumper;
@@ -30,51 +31,53 @@ if ( !$dbh ) {
}
else {
plan tests => 4;
plan tests => 10;
}
$sb->load_file("master", "t/lib/samples/osc/tbl001.sql");
my $q = new Quoter();
my $osc = new OSCCaptureSync(Quoter => $q);
my $msg = sub { print "$_[0]\n"; };
my $output;
sub test_table {
my (%args) = @_;
my ($tbl, $col, $expect) = @args{qw(tbl col expect)};
$sb->load_file("master", "t/lib/samples/osc/$tbl");
PerconaTest::wait_for_table($dbh, "osc.t", "id=5");
$dbh->do("USE osc");
my $osc = new OSCCaptureSync();
my $msg = sub { print "$_[0]\n"; };
my $output = output(
ok(
no_diff(
sub {
$osc->capture(
dbh => $dbh,
db => 'osc',
tbl => 't',
tmp_tbl => '__new_t',
columns => [qw(id c)],
columns => ['id', $col],
chunk_column => 'id',
msg => $msg,
)
},
);
ok(
no_diff(
$output,
"t/lib/samples/osc/capsync001.txt",
cmd_output => 1,
"t/lib/samples/osc/$expect",
stderr => 1,
),
"SQL statments to create triggers"
"$tbl: SQL statments to create triggers"
);
$dbh->do('insert into t values (6, "f")');
$dbh->do('update t set c="z" where id=1');
$dbh->do('delete from t where id=3');
$dbh->do("insert into t values (6, 'f')");
$dbh->do("update t set `$col`='z' where id=1");
$dbh->do("delete from t where id=3");
my $rows = $dbh->selectall_arrayref("select id, c from __new_t order by id");
my $rows = $dbh->selectall_arrayref("select id, `$col` from __new_t order by id");
is_deeply(
$rows,
[
[1, 'z'], # update t set c="z" where id=1
[6, 'f'], # insert into t values (6, "f")
],
"Triggers work"
"$tbl: Triggers work"
) or print Dumper($rows);
output(sub {
@@ -89,13 +92,31 @@ $rows = $dbh->selectall_arrayref("show triggers from `osc` like 't'");
is_deeply(
$rows,
[],
"Cleanup removes the triggers"
"$tbl: Cleanup removes the triggers"
);
}
test_table(
tbl => "tbl001.sql",
col => "c",
expect => "capsync001.txt",
);
test_table(
tbl => "tbl002.sql",
col => "default",
expect => "capsync002.txt",
);
test_table(
tbl => "tbl003.sql",
col => "space col",
expect => "capsync003.txt",
);
# #############################################################################
# Done.
# #############################################################################
$output = '';
{
local *STDERR;
open STDERR, '>', \$output;

View File

@@ -9,7 +9,7 @@ BEGIN {
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More tests => 38;
use Test::More tests => 39;
use TableParser;
use Quoter;
@@ -38,15 +38,26 @@ SKIP: {
"get_create_table(nonexistent table)"
);
my $ddl = $tp->get_create_table($dbh, 'sakila', 'actor');
ok(
no_diff(
$tp->get_create_table($dbh, 'sakila', 'actor'),
"$ddl\n",
$sandbox_version ge '5.1' ? "$sample/sakila.actor"
: "$sample/sakila.actor-5.0",
cmd_output => 1,
),
"get_create_table(sakila.actor)"
);
# Bug 932442: column with 2 spaces
$sb->load_file('master', "t/pt-table-checksum/samples/2-space-col.sql");
PerconaTest::wait_for_table($dbh, "test.t");
$ddl = $tp->get_create_table($dbh, qw(test t));
like(
$ddl,
qr/`a b`\s+/,
"Does not compress spaces (bug 932442)"
);
};
eval {

View File

@@ -1,3 +1,3 @@
CREATE TRIGGER mk_osc_del AFTER DELETE ON `osc`.`t` FOR EACH ROW DELETE IGNORE FROM `osc`.`__new_t` WHERE `osc`.`__new_t`.id = OLD.id
CREATE TRIGGER mk_osc_upd AFTER UPDATE ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (id, c) VALUES (NEW.id, NEW.c)
CREATE TRIGGER mk_osc_ins AFTER INSERT ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (id, c) VALUES(NEW.id, NEW.c)
CREATE TRIGGER mk_osc_del AFTER DELETE ON `osc`.`t` FOR EACH ROW DELETE IGNORE FROM `osc`.`__new_t` WHERE `osc`.`__new_t`.`id` = OLD.`id`
CREATE TRIGGER mk_osc_upd AFTER UPDATE ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `c`) VALUES (NEW.`id`, NEW.`c`)
CREATE TRIGGER mk_osc_ins AFTER INSERT ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `c`) VALUES(NEW.`id`, NEW.`c`)

View File

@@ -0,0 +1,3 @@
CREATE TRIGGER mk_osc_del AFTER DELETE ON `osc`.`t` FOR EACH ROW DELETE IGNORE FROM `osc`.`__new_t` WHERE `osc`.`__new_t`.`id` = OLD.`id`
CREATE TRIGGER mk_osc_upd AFTER UPDATE ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `default`) VALUES (NEW.`id`, NEW.`default`)
CREATE TRIGGER mk_osc_ins AFTER INSERT ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `default`) VALUES(NEW.`id`, NEW.`default`)

View File

@@ -0,0 +1,3 @@
CREATE TRIGGER mk_osc_del AFTER DELETE ON `osc`.`t` FOR EACH ROW DELETE IGNORE FROM `osc`.`__new_t` WHERE `osc`.`__new_t`.`id` = OLD.`id`
CREATE TRIGGER mk_osc_upd AFTER UPDATE ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `space col`) VALUES (NEW.`id`, NEW.`space col`)
CREATE TRIGGER mk_osc_ins AFTER INSERT ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `space col`) VALUES(NEW.`id`, NEW.`space col`)

View File

@@ -1,2 +1,2 @@
INSERT IGNORE INTO osc.__new_t (id, c) SELECT id, c FROM osc.t WHERE (id < 4) LOCK IN SHARE MODE
INSERT IGNORE INTO osc.__new_t (id, c) SELECT id, c FROM osc.t WHERE (id >= 4 AND id < 6) LOCK IN SHARE MODE
INSERT IGNORE INTO osc.__new_t (`id`, `c`) SELECT `id`, `c` FROM osc.t WHERE (id < 4) LOCK IN SHARE MODE
INSERT IGNORE INTO osc.__new_t (`id`, `c`) SELECT `id`, `c` FROM osc.t WHERE (id >= 4 AND id < 6) LOCK IN SHARE MODE

View File

@@ -0,0 +1,2 @@
INSERT IGNORE INTO osc.__new_t (`id`, `default`) SELECT `id`, `default` FROM osc.t WHERE (id < 4) LOCK IN SHARE MODE
INSERT IGNORE INTO osc.__new_t (`id`, `default`) SELECT `id`, `default` FROM osc.t WHERE (id >= 4 AND id < 6) LOCK IN SHARE MODE

View File

@@ -0,0 +1,2 @@
INSERT IGNORE INTO osc.__new_t (`id`, `space col`) SELECT `id`, `space col` FROM osc.t WHERE (id < 4) LOCK IN SHARE MODE
INSERT IGNORE INTO osc.__new_t (`id`, `space col`) SELECT `id`, `space col` FROM osc.t WHERE (id >= 4 AND id < 6) LOCK IN SHARE MODE

View File

@@ -0,0 +1,13 @@
DROP DATABASE IF EXISTS osc;
CREATE DATABASE osc;
USE osc;
CREATE TABLE t (
id INT UNSIGNED PRIMARY KEY,
`default` VARCHAR(16)
) ENGINE=InnoDB;
CREATE TABLE __new_t LIKE t;
INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e');

View File

@@ -0,0 +1,13 @@
DROP DATABASE IF EXISTS osc;
CREATE DATABASE osc;
USE osc;
CREATE TABLE t (
id INT UNSIGNED PRIMARY KEY,
`space col` VARCHAR(16)
) ENGINE=InnoDB;
CREATE TABLE __new_t LIKE t;
INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e');

View File

@@ -0,0 +1,45 @@
*************************** 1. row ***************************
Id: 1
User: foo
Host: 127.0.0.1:3306
db: db
Command: Query
Time: 5
State: statistics
Info: /* fruit=apple */ select 1 from fuits;
*************************** 2. row ***************************
Id: 2
User: foo
Host: 127.0.0.1:3306
db: db
Command: Query
Time: 5
State: statistics
Info: /* fruit=apple */ select 1 from fuits;
*************************** 3. row ***************************
Id: 3
User: foo
Host: 127.0.0.1:3306
db: db
Command: Query
Time: 6
State: statistics
Info: /* fruit=orange */ select 1 from fuits;
*************************** 4. row ***************************
Id: 4
User: foo
Host: 127.0.0.1:3306
db: db
Command: Query
Time: 6
State: statistics
Info: /* fruit=orange */ select 1 from fuits;
*************************** 5. row ***************************
Id: 5
User: foo
Host: 127.0.0.1:3306
db: db
Command: Query
Time: 4
State: statistics
Info: /* fruit=pear */ select 1 from fuits;

View File

@@ -25,7 +25,7 @@ if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
else {
plan tests => 4;
plan tests => 3;
}
my $output;
@@ -80,16 +80,6 @@ like(
'--verbose'
);
# ############################################################################
# Reading file (or STDIN) should require connection.
# ############################################################################
$output = `/tmp/12345/use -e "SHOW PROCESSLIST" | $trunk/bin/pt-kill -F $cnf --busy-time 1 --print --verbose`;
like(
$output,
qr/Reading files -/,
"Read STDIN from pipe"
);
# #############################################################################
# Done.
# #############################################################################

View File

@@ -29,7 +29,7 @@ my $out = "/tmp/mk-kill-test.txt";
# #############################################################################
diag(`rm $out 2>/dev/null`);
$output = `$cmd $trunk/t/lib/samples/pl/recset001.txt --match-command Query --execute-command 'echo hello > $out'`;
$output = `$cmd --test-matching $trunk/t/lib/samples/pl/recset001.txt --match-command Query --execute-command 'echo hello > $out'`;
is(
$output,
'',

View File

@@ -9,13 +9,14 @@ BEGIN {
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More tests => 8;
use Test::More tests => 9;
use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-kill";
my $sample = "$trunk/t/lib/samples/pl/";
my @args = qw(--test-matching);
my $output;
# #############################################################################
@@ -25,7 +26,7 @@ my $output;
# The 3rd query (id 4) is user=root. Next we'll test that we can filter
# that one out.
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 2 --each-busy-time 2 --match-all),
qw(--victims all-but-oldest --print)); }
);
@@ -37,7 +38,7 @@ like(
# Now with --match-user user1, the 3rd query is not matched.
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 2 --each-busy-time 2 --match-user user1),
qw(--victims all-but-oldest --print)); }
);
@@ -52,7 +53,7 @@ like(
# 9, but the 10 does. This is correct (see issue 1221) because --victims
# is applied *after* per-class query matching.
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 2 --any-busy-time 10 --match-user user1),
qw(--victims oldest --print)); }
);
@@ -63,7 +64,7 @@ is(
);
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 2 --any-busy-time 9 --match-user user1),
qw(--victims oldest --print)); }
);
@@ -75,7 +76,7 @@ like(
# Nothing matches because --each-busy-time isn't satisfied.
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 2 --each-busy-time 10 --match-user user1),
qw(--victims all-but-oldest --print)); }
);
@@ -87,7 +88,7 @@ is(
# Each busy time matches on the lowest possible value.
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 2 --each-busy-time 8 --match-user user1),
qw(--victims all-but-oldest --print)); }
);
@@ -99,7 +100,7 @@ like(
# Nothing matches because --query-count isn't satisfied.
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 4 --each-busy-time 1 --match-user user1),
qw(--victims all-but-oldest --print)); }
);
@@ -111,7 +112,7 @@ is(
# Without stripping comments, the queries won't be grouped into a class.
$output = output(
sub { pt_kill::main("$sample/recset010.txt", qw(--print),
sub { pt_kill::main(@args, "$sample/recset010.txt",
qw(--group-by info --query-count 2 --each-busy-time 2 --match-user user1),
qw(--victims all-but-oldest --print --no-strip-comments)); }
);
@@ -121,6 +122,22 @@ is(
"Queries don't match unless comments are stripped"
);
# ###########################################################################
# Use --filter to create custom --group-by columns.
# ###########################################################################
ok(
no_diff(
sub { pt_kill::main(@args, "$sample/recset011.txt",
"--filter", "$trunk/t/pt-kill/samples/filter001.txt",
qw(--group-by comment --query-count 2 --each-busy-time 5),
qw(--match-user foo --victims all --print --no-strip-comments));
},
"t/pt-kill/samples/kill-recset011-001.txt",
sed => [ "-e 's/^# [^ ]* //g'" ],
),
"--filter and custom --group-by"
);
# #############################################################################
# Done.
# #############################################################################

View File

@@ -18,13 +18,14 @@ my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $slave_dbh = $sb->get_dbh_for('slave1');
my @args = qw(--test-matching);
my $output;
# #############################################################################
# Test match commands.
# #############################################################################
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset001.txt", qw(--match-info show --print)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset001.txt", qw(--match-info show --print)); }
);
like(
$output,
@@ -33,7 +34,7 @@ like(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset002.txt", qw(--match-command Query --print)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset002.txt", qw(--match-command Query --print)); }
);
is(
$output,
@@ -42,7 +43,7 @@ is(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset002.txt", qw(--match-command Query --ignore-state), "''", "--print"); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset002.txt", qw(--match-command Query --ignore-state), "''", "--print"); }
);
like(
$output,
@@ -51,7 +52,7 @@ like(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset003.txt", "--match-state", "Sorting result", "--print"); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset003.txt", "--match-state", "Sorting result", "--print"); }
);
like(
$output,
@@ -60,7 +61,7 @@ like(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset003.txt", qw(--match-state Updating --print --victims all)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset003.txt", qw(--match-state Updating --print --victims all)); }
);
like(
$output,
@@ -69,7 +70,7 @@ like(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset003.txt", qw(--ignore-user remote --match-command Query --print)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset003.txt", qw(--ignore-user remote --match-command Query --print)); }
);
like(
$output,
@@ -78,7 +79,7 @@ like(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset004.txt", qw(--busy-time 25 --print)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset004.txt", qw(--busy-time 25 --print)); }
);
like(
$output,
@@ -87,7 +88,7 @@ like(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset004.txt", qw(--busy-time 30 --print)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset004.txt", qw(--busy-time 30 --print)); }
);
is(
$output,
@@ -96,7 +97,7 @@ is(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset005.txt", qw(--idle-time 15 --print)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset005.txt", qw(--idle-time 15 --print)); }
);
like(
$output,
@@ -105,7 +106,7 @@ like(
);
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset006.txt", qw(--match-state Locked --ignore-state), "''", qw(--busy-time 5 --print)); }
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset006.txt", qw(--match-state Locked --ignore-state), "''", qw(--busy-time 5 --print)); }
);
like(
$output,
@@ -116,7 +117,7 @@ like(
# The queries in recset002 are both State: Locked which is ignored
# by default so nothing should match, not even for --match-all.
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset002.txt",
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset002.txt",
qw(--match-all --print)); }
);
is(
@@ -127,7 +128,7 @@ is(
# Now --match-all should match.
$output = output(
sub { pt_kill::main("$trunk/t/lib/samples/pl/recset002.txt",
sub { pt_kill::main(@args, "$trunk/t/lib/samples/pl/recset002.txt",
qw(--match-all --victims all --print --ignore-state blahblah)); }
);
like(

View File

@@ -0,0 +1,4 @@
my ($comment) = $event->{Info} =~ m!/\*(.+?)\*/!;
PTDEBUG && _d('comment:', $comment);
$event->{comment} = $comment;
1

View File

@@ -0,0 +1,2 @@
KILL 4 (Query 6 sec) /* fruit=orange */ select 1 from fruits;
KILL 3 (Query 6 sec) /* fruit=orange */ select 1 from fruits;

View File

@@ -61,7 +61,7 @@ SKIP: {
# Issue 391: Add --pid option to all scripts
# #########################################################################
`touch /tmp/pt-script.pid`;
$output = `$cmd $trunk/t/lib/samples/pl/recset006.txt --match-state Locked --print --pid /tmp/pt-script.pid 2>&1`;
$output = `$cmd --test-matching $trunk/t/lib/samples/pl/recset006.txt --match-state Locked --print --pid /tmp/pt-script.pid 2>&1`;
like(
$output,
qr{PID file /tmp/pt-script.pid already exists},

View File

@@ -25,7 +25,7 @@ if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
else {
plan tests => 18;
plan tests => 22;
}
my $output = "";
@@ -232,6 +232,51 @@ is(
"Updated child table foreign key constraint (drop_old_table method)"
);
# #############################################################################
# Alter tables with columns that have reserved words and spaces.
# #############################################################################
sub test_table {
my (%args) = @_;
my ($file, $name) = @args{qw(file name)};
$sb->load_file('master', "t/lib/samples/osc/$file");
PerconaTest::wait_for_table($dbh, "osc.t", "id=5");
PerconaTest::wait_for_table($dbh, "osc.__new_t");
$dbh->do('use osc');
$dbh->do("DROP TABLE IF EXISTS osc.__new_t");
$org_rows = $dbh->selectall_arrayref('select * from osc.t order by id');
output(
sub { $exit = pt_online_schema_change::main(@args,
'D=osc,t=t', qw(--alter ENGINE=InnoDB)) },
);
$new_rows = $dbh->selectall_arrayref('select * from osc.t order by id');
is_deeply(
$new_rows,
$org_rows,
"$name rows"
);
is(
$exit,
0,
"$name exit status 0"
);
}
test_table(
file => "tbl002.sql",
name => "Reserved word column",
);
test_table(
file => "tbl003.sql",
name => "Space column",
);
# #############################################################################
# Done.
# #############################################################################

View File

@@ -29,7 +29,6 @@ else {
my $vp = new VersionParser();
my $q = new Quoter();
my $tp = new TableParser(Quoter => $q);
my $du = new MySQLDump();
my $chunker = new TableChunker(Quoter => $q, TableParser => $tp);
my $o = new OptionParser();
@@ -40,7 +39,7 @@ pt_online_schema_change::__set_quiet(1);
$sb->load_file('master', "t/pt-online-schema-change/samples/small_table.sql");
$dbh->do('use mkosc');
my $old_tbl_struct = $tp->parse($du->get_create_table($dbh, $q, 'mkosc', 'a'));
my $old_tbl_struct = $tp->parse($tp->get_create_table($dbh, 'mkosc', 'a'));
my %args = (
dbh => $dbh,
@@ -53,7 +52,6 @@ my %args = (
TableParser => $tp,
OptionParser => $o,
TableChunker => $chunker,
MySQLDump => $du,
);
my %tbl_info = pt_online_schema_change::check_tables(%args);
@@ -112,7 +110,7 @@ throws_ok(
$dbh->do('DROP TRIGGER mkosc.foo');
$dbh->do('ALTER TABLE mkosc.a DROP COLUMN i');
my $tmp_struct = $tp->parse($du->get_create_table($dbh, $q, 'mkosc', 'a'));
my $tmp_struct = $tp->parse($tp->get_create_table($dbh, 'mkosc', 'a'));
throws_ok(
sub { pt_online_schema_change::check_tables(
%args,

View File

@@ -41,7 +41,7 @@ elsif ( !@{$master_dbh->selectall_arrayref('show databases like "sakila"')} ) {
plan skip_all => 'sakila database is not loaded';
}
else {
plan tests => 30;
plan tests => 32;
}
# The sandbox servers run with lock_wait_timeout=3 and it's not dynamic
@@ -396,6 +396,30 @@ like(
"--where for upper oob chunk"
);
# #############################################################################
# Bug 932442: column with 2 spaces
# #############################################################################
$sb->load_file('master', "t/pt-table-checksum/samples/2-space-col.sql");
PerconaTest::wait_for_table($master_dbh, "test.t", "id=10");
$output = output(
sub { $exit_status = pt_table_checksum::main(@args,
qw(-t test.t --chunk-size 3)) },
stderr => 1,
);
is(
$exit_status,
0,
"Bug 932442: 0 exit"
);
is(
PerconaTest::count_checksum_results($output, 'errors'),
0,
"Bug 932442: 0 errors"
);
# #############################################################################
# Done.
# #############################################################################

View File

@@ -0,0 +1,8 @@
drop database if exists test;
create database test;
use test;
create table t (
id int auto_increment primary key,
`a  b` int not null -- 2 spaces between a and b
);
insert into t values (null, 1),(null, 2),(null, 3),(null, 4),(null, 5),(null, 6),(null, 7),(null, 8),(null, 9),(null, 10);

View File

@@ -23,11 +23,12 @@ my $tool_type; # perl or bash
my @check_subs = (qw(
check_alpha_order
check_module_usage
check_option_types
check_pod_header_order
check_pod_formatting
check_option_usage
check_pod_links
check_option_usage
check_option_types
check_option_typos
));
TOOL:
@@ -471,6 +472,22 @@ sub check_option_usage {
return;
}
sub check_option_typos {
   my ($fh) = @_;
   # Options documented as "=item --foo" in the tool's POD.
   my %ops = map { $_=>1 } split /\n/, `awk '/^=item --/ {print \$2}' $tool_file`;
   chomp(my $len = `wc -l < $tool_file`);
   # Everything from =pod to the end of the file.
   my $doc = `grep '^=pod' -A $len $tool_file`;
   while ( $doc =~ m/(--[a-z]+[a-z-]+)/sg ) {
      my $op  = $1;
      my $nop = $op;
      $nop =~ s/^--no-/--[no]/;  # --no-foo is documented as --[no]foo
      if ( !$ops{$op} && !$ops{$nop} ) {
         print "Unknown option in documentation: $op\n";
      }
   }
   return;
}
sub check_pod_links {
my $offset = `cat $tool_file | grep '^=head1 NAME' --byte-offset | cut -d ':' -f 1`;
if ( !$offset ) {

View File

@@ -16,6 +16,7 @@ warn() {
exit_status=$((exit_status | 1))
}
cwd="$PWD"
if [ -n "$PERCONA_TOOLKIT_BRANCH" ]; then
BRANCH=$PERCONA_TOOLKIT_BRANCH
else
@@ -27,6 +28,7 @@ else
fi
BRANCH=`pwd`
fi
cd "$cwd"
# ############################################################################
# Global variables