mirror of https://github.com/percona/percona-toolkit.git

First working scaffolding and foundation of host-to-host comparison.

bin/pt-upgrade: 1484 changed lines (file diff not shown because it is too large)

The hunks below are from the QueryIterator and UpgradeResults packages; the new test file t/lib/QueryIterator.t follows at the end.

@@ -25,7 +25,20 @@ use warnings FATAL => 'all';
use English qw(-no_match_vars);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;

use Mo;
use POSIX qw(signal_h);
use Data::Dumper;

use Lmo;

##
# Required
##

has 'file_iter' => (
   is       => 'ro',
   isa      => 'CodeRef',
   required => 1,
);

has 'parser' => (
   is       => 'ro',
@@ -33,13 +46,23 @@ has 'parser' => (
   required => 1,
);

has 'fingerprint' => (
   is       => 'ro',
   isa      => 'CodeRef',
   required => 1,
);

has 'oktorun' => (
   is       => 'ro',
   isa      => 'CodeRef',
   required => 1,
);

has 'database' => (
##
# Optional
##

has 'default_database' => (
   is       => 'rw',
   isa      => 'Maybe[Str]',
   required => 0,
@@ -58,10 +81,66 @@ has 'read_only' => (
   default  => 0,
);

has 'read_timeout' => (
   is       => 'ro',
   isa      => 'Int',
   required => 0,
   default  => 0,
);

##
# Private
##

has 'stats' => (
   is       => 'ro',
   isa      => 'HashRef',
   required => 0,
   default  => sub { return {} },
);

has 'database' => (
   is       => 'rw',
   isa      => 'Maybe[Str]',
   required => 0,
);

has '_fh' => (
   is       => 'rw',
   isa      => 'Maybe[FileHandle]',
   required => 0,
);

has '_file_name' => (
   is       => 'rw',
   isa      => 'Maybe[Str]',
   required => 0,
);

has '_file_size' => (
   is       => 'rw',
   isa      => 'Maybe[Int]',
   required => 0,
);

has '_offset' => (
   is       => 'rw',
   isa      => 'Maybe[Int]',
   required => 0,
);

has '_parser_args' => (
   is       => 'rw',
   isa      => 'HashRef',
   required => 0,
);

sub BUILDARGS {
   my $class = shift;
   my $args = $class->SUPER::BUILDARGS(@_);

   my $filter_code;
   if ( my $filter = $args{filter} ) {
   if ( my $filter = $args->{filter} ) {
      if ( -f $filter && -r $filter ) {
         PTDEBUG && _d('Reading file', $filter, 'for --filter code');
         open my $fh, "<", $filter or die "Cannot open $filter: $OS_ERROR";
@@ -71,7 +150,11 @@ sub BUILDARGS {
      else {
         $filter = "( $filter )"; # issue 565
      }
      my $code = "sub { PTDEBUG && _d('callback: filter'); my(\$event) = shift; $filter && return \$event; };";
      my $code = "sub {
         PTDEBUG && _d('callback: filter');
         my(\$event) = shift;
         $filter && return \$event;
      };";
      PTDEBUG && _d('--filter code:', $code);
      $filter_code = eval $code
         or die "Error compiling --filter code: $code\n$EVAL_ERROR";
@@ -80,45 +163,82 @@ sub BUILDARGS {
      $filter_code = sub { return 1 };
   }

   my $self = {
      %$args,
      filter => $filter_code,
   };

   return $self;
}

sub next {
   my ($self) = @_;

   if ( !$self->_fh ) {
      my ($fh, $file_name, $file_size) = $self->file_iter->();
      return unless $fh;

      PTDEBUG && _d('Reading', $file_name);
      $self->_fh($fh);
      $self->_file_name($file_name);
      $self->_file_size($file_size);

      my $parser_args = {};

      if ( my $read_timeout = $self->read_timeout ) {
         $parser_args->{next_event}
            = sub { return _read_timeout($fh, $read_timeout); };
      }
      else {
         $parser_args->{next_event} = sub { return <$fh>; };
      }

      $parser_args->{tell} = sub {
         my $offset = tell $fh;  # update global $offset
         $self->_offset($offset);
         return $offset;  # legacy: return global $offset
      };

      $self->_parser_args($parser_args);
   }

   EVENT:
   while (
      $self->oktorun()
      && (my $event = $parser->parse_event(%args))
      $self->oktorun
      && (my $event = $self->parser->parse_event(%{ $self->_parser_args }) )
   ) {

      $self->stats->{events}++;
      $self->stats->{queries_read}++;

      if ( ($event->{cmd} || '') ne 'Query' ) {
         PTDEBUG && _d('Skipping non-Query cmd');
         $stats->{not_query}++;
         $self->stats->{not_query}++;
         next EVENT;
      }

      if ( !$event->{arg} ) {
         PTDEBUG && _d('Skipping empty arg');
         $stats->{empty_query}++;
         $self->stats->{empty_query}++;
         next EVENT;
      }

      if ( !$self->filter->($event) ) {
         $self->stats->{queries_filtered}++;
         next EVENT;
      }

      next EVENT unless $self->filter->();

      if ( $self->read_only ) {
         if ( $event->{arg} !~ m/(?:^SELECT|(?:\*\/\s*SELECT))/i ) {
            PTDEBUG && _d('Skipping non-SELECT query');
            $stats->{not_select}++;
            $self->stats->{not_select}++;
            next EVENT;
         }
      }

      $event->{fingerprint} = $qr->fingerprint($event->{arg});
      $event->{fingerprint} = $self->fingerprint->($event->{arg});

      my $db = $event->{db} || $event->{Schema} || $hosts->[0]->{dsn}->{D};
      if ( $db && (!$current_db || $db ne $current_db) ) {
      my $current_db = $self->database;
      my $db = $event->{db} || $event->{Schema} || $self->default_database;
      if ( $db && (!$current_db || $current_db ne $db) ) {
         $self->database($db);
      }
      else {
@@ -126,12 +246,58 @@ sub next {
      }

      return $event;
   } # EVENT
   }

   PTDEBUG && _d('Done reading', $self->_file_name);
   close $self->_fh if $self->_fh;
   $self->_fh(undef);
   $self->_file_name(undef);
   $self->_file_size(undef);

   return;
}

no Mo;
# Read the fh and timeout after t seconds.
sub _read_timeout {
   my ( $fh, $t ) = @_;
   return unless $fh;
   $t ||= 0;  # will reset alarm and cause read to wait forever

   # Set the SIGALRM handler.
   my $mask   = POSIX::SigSet->new(&POSIX::SIGALRM);
   my $action = POSIX::SigAction->new(
      sub {
         # This sub is called when a SIGALRM is received.
         die 'read timeout';
      },
      $mask,
   );
   my $oldaction = POSIX::SigAction->new();
   sigaction(&POSIX::SIGALRM, $action, $oldaction);

   my $res;
   eval {
      alarm $t;
      $res = <$fh>;
      alarm 0;
   };
   if ( $EVAL_ERROR ) {
      PTDEBUG && _d('Read error:', $EVAL_ERROR);
      die $EVAL_ERROR unless $EVAL_ERROR =~ m/read timeout/;
      $res = undef;  # res is a blank string after a timeout
   }
   return $res;
}

sub _d {
   my ($package, undef, $line) = caller 0;
   @_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
        map { defined $_ ? $_ : 'undef' }
        @_;
   print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
}

no Lmo;
1;
}
# ###########################################################################
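
For reference, here is a small standalone sketch (not part of the commit) of what the --filter handling in BUILDARGS above compiles a filter expression into. It assumes the expression form used by the new test at the end of this commit ('$event->{db} eq "test"') and stubs PTDEBUG and _d() so it runs outside the toolkit:

use strict;
use warnings;

use constant PTDEBUG => 0;
sub _d { }   # stub; the real _d() prints debug lines to STDERR

my $filter = '$event->{db} eq "test"';   # same expression the new test uses
$filter = "( $filter )";                 # issue 565: wrap the expression in parens

# This mirrors the string that BUILDARGS eval()s into the filter callback.
my $code = "sub {
   PTDEBUG && _d('callback: filter');
   my(\$event) = shift;
   $filter && return \$event;
};";

my $filter_code = eval $code
   or die "Error compiling --filter code: $code\n$@";

# next() keeps events for which the callback returns true and counts the
# rest in stats->{queries_filtered}.
print(( $filter_code->({ db => 'test'   }) ? "kept" : "filtered" ), "\n");
print(( $filter_code->({ db => 'sakila' }) ? "kept" : "filtered" ), "\n");
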
@@ -20,20 +20,157 @@
{
package UpgradeResults;

use Mo;
use Scalar::Util qw(blessed);
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;

has 'query_classes' => (
use Digest::MD5 qw(md5_hex);

use Lmo;

has 'max_class_size' => (
   is       => 'ro',
   isa      => 'Int',
   required => 1,
);

has 'max_examples' => (
   is       => 'ro',
   isa      => 'Int',
   required => 1,
);

has 'classes' => (
   is       => 'rw',
   isa      => 'HashRef',
   required => 0,
   default  => sub { return {} },
);

sub save_diffs {
   my ($self, %args) = @_;

no Mo;
   my $event           = $args{event};
   my $query_time_diff = $args{query_time_diff};
   my $warning_diffs   = $args{warning_diffs};
   my $row_diffs       = $args{row_diffs};

   my $class = $self->class(event => $event);

   if ( my $query = $self->_can_save(event => $event, class => $class) ) {

      if ( $query_time_diff
           && scalar @{$class->{query_time_diffs}} < $self->max_examples ) {
         push @{$class->{query_time_diffs}}, [
            $query,
            $query_time_diff,
         ];
      }

      if ( @$warning_diffs
           && scalar @{$class->{warning_diffs}} < $self->max_examples ) {
         push @{$class->{warnings_diffs}}, [
            $query,
            $warning_diffs,
         ];
      }

      if ( @$row_diffs
           && scalar @{$class->{row_diffs}} < $self->max_examples ) {
         push @{$class->{row_diffs}}, [
            $query,
            $row_diffs,
         ];
      }
   }

   return;
}

sub save_error {
   my ($self, %args) = @_;

   my $event  = $args{event};
   my $error1 = $args{error1};
   my $error2 = $args{error2};

   my $class = $self->class(event => $event);

   if ( my $query = $self->_can_save(event => $event, class => $class) ) {
      if ( scalar @{$class->{errors}} < $self->max_examples ) {
         push @{$class->{errors}}, [
            $query,
            $error1,
            $error2,
         ];
      }
   }

   return;
}

sub _can_save {
   my ($self, %args) = @_;
   my $event = $args{event};
   my $class = $args{class};
   my $query = $event->{arg};
   if ( exists $class->{unique_queries}->{$query}
        || scalar keys %{$class->{unique_queries}} < $self->max_class_size ) {
      $class->{unique_queries}->{$query}++;
      return $query;
   }
   PTDEBUG && _d('Too many queries in class, discarding', $query);
   $class->{discarded}++;
   return;
}

sub class {
   my ($self, %args) = @_;
   my $event = $args{event};

   my $id      = uc(substr(md5_hex($event->{fingerprint}), -16));
   my $classes = $self->classes;
   my $class   = $classes->{$id};
   if ( !$class ) {
      PTDEBUG && _d('New query class:', $id, $event->{fingerprint});
      $class = $self->_new_class(
         id    => $id,
         event => $event,
      );
      $classes->{$id} = $class;
   }
   return $class;
}

sub _new_class {
   my ($self, %args) = @_;
   my $id    = $args{id};
   my $event = $args{event};
   PTDEBUG && _d('New query class:', $id, $event->{fingerprint});
   my $class = {
      id             => $id,
      fingerprint    => $event->{fingerprint},
      discarded      => 0,
      unique_queries => {
         $event->{arg} => 0,
      },
      query_time_diffs => [],
      warning_diffs    => [],
      row_diffs        => [],
   };
   return $class;
}

sub _d {
   my ($package, undef, $line) = caller 0;
   @_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
        map { defined $_ ? $_ : 'undef' }
        @_;
   print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
}

no Lmo;
1;
}
# ###########################################################################
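
The code that actually drives these classes lives in bin/pt-upgrade, whose diff is suppressed above, so the snippet below is only a rough usage sketch of the UpgradeResults interface shown in this hunk: events that produced differences between the two hosts are filed into per-fingerprint classes, keyed by uc(substr(md5_hex($fingerprint), -16)) and capped by max_class_size and max_examples. The event values and limits here are invented for illustration, and running it requires the toolkit's lib/ directory on @INC (as the new test below arranges).

use strict;
use warnings;
use UpgradeResults;   # from the toolkit's lib/ directory

my $results = UpgradeResults->new(
   max_class_size => 1000,   # illustrative; the real values come from pt-upgrade's options
   max_examples   => 10,
);

# An event of the shape QueryIterator::next() returns (values made up).
my $event = {
   arg         => 'select sleep(2) from n',
   fingerprint => 'select sleep(?) from n',
};

# Suppose the two hosts disagreed on query time and on one row.
$results->save_diffs(
   event           => $event,
   query_time_diff => 1.5,
   warning_diffs   => [],
   row_diffs       => [ [ 'row 1 differs' ] ],
);

my ($id)  = keys %{ $results->classes };
my $class = $results->classes->{$id};
printf "class %s (%s): %d query-time diff sample(s), %d row diff sample(s)\n",
   $id,
   $class->{fingerprint},
   scalar @{ $class->{query_time_diffs} },
   scalar @{ $class->{row_diffs} };
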
t/lib/QueryIterator.t: 132 lines (new file)

@@ -0,0 +1,132 @@
#!/usr/bin/perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use Data::Dumper;

use FileIterator;
use QueryRewriter;
use QueryIterator;
use SlowLogParser;
use PerconaTest;

my $parser    = SlowLogParser->new();
my $qr        = QueryRewriter->new();
my $file_iter = FileIterator->new();

my $oktorun = 1;
my $sample  = "t/lib/samples/slowlogs";

sub test_query_iter {
   my (%args) = @_;

   my $files = $file_iter->get_file_itr(
      @{$args{files}}
   );

   my $query_iter = QueryIterator->new(
      file_iter   => $files,
      parser      => $args{parser} || $parser,
      fingerprint => sub { return $qr->fingerprint(@_) },
      oktorun     => sub { return $oktorun },
      # Optional args
      default_database => $args{default_database},
      ($args{filter}       ? (filter       => $args{filter})       : ()),
      ($args{read_only}    ? (read_only    => $args{read_only})    : ()),
      ($args{read_timeout} ? (read_timeout => $args{read_timeout}) : ()),
   );

   my @events;
   while ( my $event = $query_iter->next() ) {
      push @events, $event;
   }

   is_deeply(
      \@events,
      $args{expect},
      $args{name}
   ) or diag(Dumper(\@events));
}

my $slow001_events = [
   {
      Lock_time     => '0',
      Query_time    => '2',
      Rows_examined => '0',
      Rows_sent     => '1',
      arg           => 'select sleep(2) from n',
      bytes         => 22,
      cmd           => 'Query',
      db            => 'test',
      fingerprint   => 'select sleep(?) from n',
      host          => 'localhost',
      ip            => '',
      pos_in_log    => 0,
      ts            => '071015 21:43:52',
      user          => 'root',
   },
   {
      Lock_time     => '0',
      Query_time    => '2',
      Rows_examined => '0',
      Rows_sent     => '1',
      arg           => 'select sleep(2) from test.n',
      bytes         => 27,
      cmd           => 'Query',
      db            => 'sakila',
      fingerprint   => 'select sleep(?) from test.n',
      host          => 'localhost',
      ip            => '',
      pos_in_log    => 359,
      ts            => '071015 21:45:10',
      user          => 'root',
   }
];

test_query_iter(
   name   => "slow001.txt, defaults",
   files  => [
      "$trunk/$sample/slow001.txt"
   ],
   expect => $slow001_events,
);

test_query_iter(
   name         => "slow001.txt, read_timeout=5",
   read_timeout => 5,
   files        => [
      "$trunk/$sample/slow001.txt"
   ],
   expect       => $slow001_events,
);

test_query_iter(
   name      => "slow001.txt, read_only",
   read_only => 1,
   files     => [
      "$trunk/$sample/slow001.txt"
   ],
   expect    => $slow001_events,
);

test_query_iter(
   name   => "slow001.txt, in-line filter",
   filter => '$event->{db} eq "test"',
   files  => [
      "$trunk/$sample/slow001.txt"
   ],
   expect => [ $slow001_events->[0] ],
);

# #############################################################################
# Done.
# #############################################################################
done_testing;
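
The assertions above only compare the events returned; next() also maintains the counters in the iterator's stats hash (events, queries_read, not_query, empty_query, queries_filtered, not_select). If one wanted to check those too, a small addition inside test_query_iter(), after the while loop, could look like this sketch (not part of the committed test):

# Hypothetical extra check inside test_query_iter(), using the $query_iter
# and @events lexicals defined there.
my $stats = $query_iter->stats;
cmp_ok(
   $stats->{queries_read} || 0,
   '>=',
   scalar @events,
   "$args{name} (queries_read >= events returned)"
);
diag(Dumper($stats)) if $ENV{PTDEBUG};
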